From 4b08701117391908c83c799d88c0dcddf814bf8c Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Wed, 13 Aug 2025 18:14:26 +0200 Subject: [PATCH 01/33] fix: standardize assertions across end2end tests --- .../Templates/AnnotationTests.cs | 64 ++-- .../EndToEndScaffold/Templates/MacroTests.cs | 149 ++++---- .../EndToEndScaffold/Templates/MySqlTests.cs | 170 +++++---- .../Templates/PostgresTests.cs | 49 ++- .../EndToEndScaffold/Templates/SqliteTests.cs | 72 ++-- .../MySqlConnectorDapperTester.generated.cs | 337 ++++++++++-------- .../MySqlConnectorTester.generated.cs | 337 ++++++++++-------- .../NpgsqlDapperTester.generated.cs | 221 +++++++----- .../EndToEndTests/NpgsqlTester.generated.cs | 221 +++++++----- .../SqliteDapperTester.generated.cs | 245 +++++++------ .../EndToEndTests/SqliteTester.generated.cs | 245 +++++++------ .../MySqlConnectorDapperTester.generated.cs | 337 ++++++++++-------- .../MySqlConnectorTester.generated.cs | 337 ++++++++++-------- .../NpgsqlDapperTester.generated.cs | 221 +++++++----- .../NpgsqlTester.generated.cs | 221 +++++++----- .../SqliteDapperTester.generated.cs | 245 +++++++------ .../SqliteTester.generated.cs | 245 +++++++------ 17 files changed, 2105 insertions(+), 1611 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/AnnotationTests.cs b/end2end/EndToEndScaffold/Templates/AnnotationTests.cs index 4bebe4e8..7d73f5d6 100644 --- a/end2end/EndToEndScaffold/Templates/AnnotationTests.cs +++ b/end2end/EndToEndScaffold/Templates/AnnotationTests.cs @@ -26,14 +26,14 @@ public async Task TestOne() { Name = {{Consts.BojackAuthor}} }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); - } + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && - x.Name.Equals(y.Name) && - x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorRow x, 
QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -66,21 +66,19 @@ public async Task TestMany() Offset = 0 }); AssertSequenceEquals(expected, actual); - } - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } """ @@ -139,6 +137,20 @@ public async Task TestExecRows() Offset = 0 }); AssertSequenceEquals(expected, actual); + + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } """, }, @@ -159,14 +171,14 @@ public async Task TestExecLastId() { Id = id1 }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); - } + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && - x.Name.Equals(y.Name) && - x.Bio.Equals(y.Bio); + 
void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, diff --git a/end2end/EndToEndScaffold/Templates/MacroTests.cs b/end2end/EndToEndScaffold/Templates/MacroTests.cs index 436d1a87..7ec53980 100644 --- a/end2end/EndToEndScaffold/Templates/MacroTests.cs +++ b/end2end/EndToEndScaffold/Templates/MacroTests.cs @@ -31,20 +31,21 @@ public async Task TestNargNull() }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSequenceEquals(expected, actual); + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -68,7 +69,21 @@ public async Task TestNargNotNull() }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + 
AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -116,37 +131,24 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return - SingularEquals(x.Author{{Consts.UnknownRecordValuePlaceholder}}, y.Author{{Consts.UnknownRecordValuePlaceholder}}) && - SingularEquals(x.Book{{Consts.UnknownRecordValuePlaceholder}}, y.Book{{Consts.UnknownRecordValuePlaceholder}}); - } + AssertSequenceEquals(expected, actual); - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author{{Consts.UnknownRecordValuePlaceholder}}.Name + o.Book{{Consts.UnknownRecordValuePlaceholder}}.Name).ToList(); - y = y.OrderBy(o => o.Author{{Consts.UnknownRecordValuePlaceholder}}.Name + o.Book{{Consts.UnknownRecordValuePlaceholder}}.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && - x.Name.Equals(y.Name) && - x.Bio.Equals(y.Bio); - } + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Id, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Id)); + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Name, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Name)); + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Bio, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Bio)); + 
Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.Id, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.Id)); + Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.AuthorId, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.AuthorId)); + Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.Name, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && - x.AuthorId.Equals(y.AuthorId) && - x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } """ }, @@ -177,21 +179,24 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } + AssertSequenceEquals(expected, actual); - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return - SingularEquals(x.Author{{Consts.UnknownRecordValuePlaceholder}}, y.Author{{Consts.UnknownRecordValuePlaceholder}}) && - SingularEquals(x.Author2{{Consts.UnknownRecordValuePlaceholder}}, y.Author2{{Consts.UnknownRecordValuePlaceholder}}); - } + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Id, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Id)); + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Name, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Name)); + Assert.That(x.Author{{Consts.UnknownRecordValuePlaceholder}}.Bio, Is.EqualTo(y.Author{{Consts.UnknownRecordValuePlaceholder}}.Bio)); + Assert.That(x.Author2{{Consts.UnknownRecordValuePlaceholder}}.Id, Is.EqualTo(y.Author2{{Consts.UnknownRecordValuePlaceholder}}.Id)); + 
Assert.That(x.Author2{{Consts.UnknownRecordValuePlaceholder}}.Name, Is.EqualTo(y.Author2{{Consts.UnknownRecordValuePlaceholder}}.Name)); + Assert.That(x.Author2{{Consts.UnknownRecordValuePlaceholder}}.Bio, Is.EqualTo(y.Author2{{Consts.UnknownRecordValuePlaceholder}}.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } """ }, @@ -224,22 +229,24 @@ public async Task TestPartialEmbed() { Name = {{Consts.BojackBookTitle}} }); - Assert.That(SequenceEquals(expected, actual)); - } + AssertSequenceEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - private static bool SingularEquals(QuerySql.GetAuthorsByBookNameRow x, QuerySql.GetAuthorsByBookNameRow y) - { - return x.Id.Equals(y.Id) && - x.Name.Equals(y.Name) && - x.Bio.Equals(y.Bio) && - SingularEquals(x.Book, y.Book); - } + void AssertSingularEquals(QuerySql.GetAuthorsByBookNameRow x, QuerySql.GetAuthorsByBookNameRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.Id, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.Id)); + Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.AuthorId, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.AuthorId)); + Assert.That(x.Book{{Consts.UnknownRecordValuePlaceholder}}.Name, Is.EqualTo(y.Book{{Consts.UnknownRecordValuePlaceholder}}.Name)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < 
x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } """ }, diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index 76dbcf15..dd061ae2 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -47,15 +47,19 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CLongtext = cLongtext }; var actual = await QuerySql.GetMysqlTypes(); - - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CChar, Is.EqualTo(expected.CChar)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CNchar, Is.EqualTo(expected.CNchar)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CNationalChar, Is.EqualTo(expected.CNationalChar)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CVarchar, Is.EqualTo(expected.CVarchar)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CTinytext, Is.EqualTo(expected.CTinytext)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CMediumtext, Is.EqualTo(expected.CMediumtext)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CText, Is.EqualTo(expected.CText)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CLongtext, Is.EqualTo(expected.CLongtext)); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CChar, Is.EqualTo(y.CChar)); + Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); + Assert.That(x.CNationalChar, Is.EqualTo(y.CNationalChar)); + Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); + Assert.That(x.CTinytext, Is.EqualTo(y.CTinytext)); + Assert.That(x.CMediumtext, Is.EqualTo(y.CMediumtext)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CLongtext, Is.EqualTo(y.CLongtext)); + } } """ }, @@ -100,15 +104,19 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CBigint 
= cBigint }; var actual = await QuerySql.GetMysqlTypes(); - - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBool, Is.EqualTo(expected.CBool)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CTinyint, Is.EqualTo(expected.CTinyint)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CMediumint, Is.EqualTo(expected.CMediumint)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CInt, Is.EqualTo(expected.CInt)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CBool, Is.EqualTo(y.CBool)); + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CTinyint, Is.EqualTo(y.CTinyint)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CMediumint, Is.EqualTo(y.CMediumint)); + Assert.That(x.CInt, Is.EqualTo(y.CInt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } """ }, @@ -149,14 +157,18 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypes(); - - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDecimal, Is.EqualTo(expected.CDecimal)); - 
Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } """ }, @@ -185,10 +197,14 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypes(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CTimestamp, Is.EqualTo(expected.CTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } """ }, @@ -443,14 +459,19 @@ public async Task TestFloatingPointCopyFrom( CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypesCnt(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - 
Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } """ }, @@ -486,12 +507,16 @@ public async Task TestDateTimeCopyFrom( CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypesCnt(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CDatetime, Is.EqualTo(expected.CDatetime)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CTimestamp, Is.EqualTo(expected.CTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, 
QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } """ }, @@ -537,15 +562,19 @@ public async Task TestBinaryCopyFrom( CLongblob = cLongblob }; var actual = await QuerySql.GetMysqlTypesCnt(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBit, Is.EqualTo(expected.CBit)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBinary, Is.EqualTo(expected.CBinary)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CVarbinary, Is.EqualTo(expected.CVarbinary)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CTinyblob, Is.EqualTo(expected.CTinyblob)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBlob, Is.EqualTo(expected.CBlob)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CMediumblob, Is.EqualTo(expected.CMediumblob)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CLongblob, Is.EqualTo(expected.CLongblob)); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBit, Is.EqualTo(y.CBit)); + Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); + Assert.That(x.CVarbinary, Is.EqualTo(y.CVarbinary)); + Assert.That(x.CTinyblob, Is.EqualTo(y.CTinyblob)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + Assert.That(x.CMediumblob, Is.EqualTo(y.CMediumblob)); + Assert.That(x.CLongblob, Is.EqualTo(y.CLongblob)); + } } """ }, @@ -562,8 +591,8 @@ public async Task TestMySqlTransaction() var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new 
QuerySql.CreateAuthorArgs { Id = {{Consts.BojackId}}, Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }); - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); @@ -573,8 +602,15 @@ public async Task TestMySqlTransaction() Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -594,7 +630,7 @@ public async Task TestMySqlTransactionRollback() await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } """ }, @@ -662,14 +698,14 @@ await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs BioType = ExtendedBiosBioType.Memoir }); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - private void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - 
Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } } """ }, @@ -789,13 +825,13 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs var actual = await QuerySql.GetMysqlFunctions(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - private static void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow expected, QuerySql.GetMysqlFunctionsRow actual) - { - Assert.That(actual.MaxInt, Is.EqualTo(expected.MaxInt)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } """ } diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 52cbc6c3..bb147ba7 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -79,11 +79,15 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CBigint = cBigint }; var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CSmallint, Is.EqualTo(expected.CSmallint)); - 
Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.CBigint, Is.EqualTo(expected.CBigint)); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } """ }, @@ -615,6 +619,7 @@ public async Task TestArrayCopyFrom( void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPostgresArrayTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); } } """ @@ -809,9 +814,8 @@ public async Task TestPostgresTransaction() var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = {{Consts.BojackId}}, Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }); - // The GetAuthor method in NpgsqlExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); @@ -821,8 +825,15 @@ public async Task TestPostgresTransaction() Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -842,7 +853,7 @@ public async Task TestPostgresTransactionRollback() await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } """ }, @@ -873,13 +884,13 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs var actual = await QuerySql.GetPostgresFunctions(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - private static void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow expected, QuerySql.GetPostgresFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, 
Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } """ }, @@ -988,9 +999,9 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) { - if (x.CXml == null && y.CXml == null) - return; - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); } } """ diff --git a/end2end/EndToEndScaffold/Templates/SqliteTests.cs b/end2end/EndToEndScaffold/Templates/SqliteTests.cs index deddb5aa..ae627b5e 100644 --- a/end2end/EndToEndScaffold/Templates/SqliteTests.cs +++ b/end2end/EndToEndScaffold/Templates/SqliteTests.cs @@ -36,14 +36,14 @@ await QuerySql.InsertSqliteTypes(new QuerySql.InsertSqliteTypesArgs }; var actual = await QuerySql.GetSqliteTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - private static void AssertSingularEquals(QuerySql.GetSqliteTypesRow expected, QuerySql.GetSqliteTypesRow actual) - { - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); + void AssertSingularEquals(QuerySql.GetSqliteTypesRow x, QuerySql.GetSqliteTypesRow y) + { + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + } } """ }, @@ -78,14 +78,14 @@ public async Task TestCopyFrom( }; var actual = await QuerySql.GetSqliteTypesCnt(); 
AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow expected, QuerySql.GetSqliteTypesCntRow actual) - { - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); + + void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow x, QuerySql.GetSqliteTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + } } """ }, @@ -102,9 +102,8 @@ public async Task TestSqliteTransaction() var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = {{Consts.BojackId}}, Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }); - // The GetAuthor method in SqliteExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct/class) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + ClassicAssert.IsNull(actual); transaction.Commit(); @@ -114,8 +113,15 @@ public async Task TestSqliteTransaction() Name = {{Consts.BojackAuthor}}, Bio = {{Consts.BojackTheme}} }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ }, @@ -135,7 +141,7 @@ public async Task TestSqliteTransactionRollback() transaction.Rollback(); var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = {{Consts.BojackAuthor}} }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } """ }, @@ -165,13 +171,13 @@ await QuerySql.InsertSqliteTypes(new QuerySql.InsertSqliteTypesArgs }; var actual = await QuerySql.GetSqliteFunctions(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - } - private static void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow expected, QuerySql.GetSqliteFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxReal, Is.EqualTo(expected.MaxReal)); - Assert.That(actual.MaxText, Is.EqualTo(expected.MaxText)); + 
void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow x, QuerySql.GetSqliteFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxReal, Is.EqualTo(y.MaxReal)); + Assert.That(x.MaxText, Is.EqualTo(y.MaxText)); + } } """ }, @@ -193,12 +199,14 @@ public async Task TestGetAuthorByIdWithMultipleNamedParam() IdArg = {{Consts.BojackId}}, Take = 1 }); - Assert.That(SingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}})); - } + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - private static bool SingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } """ } diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index bbd69996..0f96ece5 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -24,12 +24,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + 
Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -54,21 +55,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -106,6 +104,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -119,12 +130,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return 
x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -151,19 +163,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -207,31 +223,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = 
x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -273,21 +281,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void 
AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -305,7 +312,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -326,14 +346,18 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CLongtext = cLongtext }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CChar, Is.EqualTo(expected.CChar)); - Assert.That(actual.CNchar, Is.EqualTo(expected.CNchar)); - Assert.That(actual.CNationalChar, Is.EqualTo(expected.CNationalChar)); - Assert.That(actual.CVarchar, Is.EqualTo(expected.CVarchar)); - Assert.That(actual.CTinytext, Is.EqualTo(expected.CTinytext)); - Assert.That(actual.CMediumtext, Is.EqualTo(expected.CMediumtext)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CLongtext, Is.EqualTo(expected.CLongtext)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CChar, Is.EqualTo(y.CChar)); + Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); + Assert.That(x.CNationalChar, Is.EqualTo(y.CNationalChar)); + Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); + Assert.That(x.CTinytext, 
Is.EqualTo(y.CTinytext)); + Assert.That(x.CMediumtext, Is.EqualTo(y.CMediumtext)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CLongtext, Is.EqualTo(y.CLongtext)); + } } [Test] @@ -354,14 +378,18 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CBigint = cBigint }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CBool, Is.EqualTo(expected.CBool)); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CTinyint, Is.EqualTo(expected.CTinyint)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CMediumint, Is.EqualTo(expected.CMediumint)); - Assert.That(actual.CInt, Is.EqualTo(expected.CInt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CBool, Is.EqualTo(y.CBool)); + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CTinyint, Is.EqualTo(y.CTinyint)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CMediumint, Is.EqualTo(y.CMediumint)); + Assert.That(x.CInt, Is.EqualTo(y.CInt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -372,8 +400,8 @@ public async Task TestMySqlTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { 
Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -381,8 +409,14 @@ public async Task TestMySqlTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -395,7 +429,7 @@ public async Task TestMySqlTransactionRollback() await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -415,13 +449,17 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? 
cNumeric, CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -437,9 +475,13 @@ public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -507,13 +549,12 @@ public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTim }; var actual = await QuerySql.GetMysqlFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow expected, QuerySql.GetMysqlFunctionsRow actual) - { - Assert.That(actual.MaxInt, Is.EqualTo(expected.MaxInt)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } [Test] @@ -533,14 +574,13 @@ public async Task TestMySqlScopedSchemaEnum() }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); AssertSingularEquals(expected, actual); - } - - private void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } } [Test] @@ -689,13 +729,18 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? 
cFloat, decima CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -714,11 +759,15 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CDatetime, Is.EqualTo(expected.CDatetime)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -741,14 +790,18 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CLongblob = cLongblob }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CBit, Is.EqualTo(expected.CBit)); - Assert.That(actual.CBinary, Is.EqualTo(expected.CBinary)); - Assert.That(actual.CVarbinary, Is.EqualTo(expected.CVarbinary)); - Assert.That(actual.CTinyblob, Is.EqualTo(expected.CTinyblob)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); - Assert.That(actual.CMediumblob, Is.EqualTo(expected.CMediumblob)); - Assert.That(actual.CLongblob, Is.EqualTo(expected.CLongblob)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBit, Is.EqualTo(y.CBit)); + Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); + Assert.That(x.CVarbinary, Is.EqualTo(y.CVarbinary)); + Assert.That(x.CTinyblob, Is.EqualTo(y.CTinyblob)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + Assert.That(x.CMediumblob, Is.EqualTo(y.CMediumblob)); + Assert.That(x.CLongblob, Is.EqualTo(y.CLongblob)); + } } [Test] diff --git 
a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 97605d41..7a645979 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -24,12 +24,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -54,21 +55,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -106,6 +104,19 @@ public async 
Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -119,12 +130,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -151,19 +163,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && SingularEquals(x.Author2.Value, y.Author2.Value); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, 
Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Author2.Value.Id, Is.EqualTo(y.Author2.Value.Id)); + Assert.That(x.Author2.Value.Name, Is.EqualTo(y.Author2.Value.Name)); + Assert.That(x.Author2.Value.Bio, Is.EqualTo(y.Author2.Value.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -207,31 +223,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && SingularEquals(x.Book.Value, y.Book.Value); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - y = y.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Book.Value.Id, Is.EqualTo(y.Book.Value.Id)); + Assert.That(x.Book.Value.AuthorId, Is.EqualTo(y.Book.Value.AuthorId)); + Assert.That(x.Book.Value.Name, Is.EqualTo(y.Book.Value.Name)); + } - private static bool 
SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -273,21 +281,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -305,7 +312,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + 
Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -326,14 +346,18 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CLongtext = cLongtext }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.Value.CChar, Is.EqualTo(expected.CChar)); - Assert.That(actual.Value.CNchar, Is.EqualTo(expected.CNchar)); - Assert.That(actual.Value.CNationalChar, Is.EqualTo(expected.CNationalChar)); - Assert.That(actual.Value.CVarchar, Is.EqualTo(expected.CVarchar)); - Assert.That(actual.Value.CTinytext, Is.EqualTo(expected.CTinytext)); - Assert.That(actual.Value.CMediumtext, Is.EqualTo(expected.CMediumtext)); - Assert.That(actual.Value.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.Value.CLongtext, Is.EqualTo(expected.CLongtext)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CChar, Is.EqualTo(y.CChar)); + Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); + Assert.That(x.CNationalChar, Is.EqualTo(y.CNationalChar)); + Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); + Assert.That(x.CTinytext, Is.EqualTo(y.CTinytext)); + Assert.That(x.CMediumtext, Is.EqualTo(y.CMediumtext)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CLongtext, Is.EqualTo(y.CLongtext)); + } } [Test] @@ -354,14 +378,18 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? 
cTin CBigint = cBigint }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.Value.CBool, Is.EqualTo(expected.CBool)); - Assert.That(actual.Value.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.Value.CTinyint, Is.EqualTo(expected.CTinyint)); - Assert.That(actual.Value.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.Value.CMediumint, Is.EqualTo(expected.CMediumint)); - Assert.That(actual.Value.CInt, Is.EqualTo(expected.CInt)); - Assert.That(actual.Value.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.Value.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CBool, Is.EqualTo(y.CBool)); + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CTinyint, Is.EqualTo(y.CTinyint)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CMediumint, Is.EqualTo(y.CMediumint)); + Assert.That(x.CInt, Is.EqualTo(y.CInt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -372,8 +400,8 @@ public async Task TestMySqlTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -381,8 +409,14 @@ public async Task TestMySqlTransaction() Name = "Bojack Horseman", Bio = "Back in 
the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -395,7 +429,7 @@ public async Task TestMySqlTransactionRollback() await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -415,13 +449,17 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? 
cNumeric, CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.Value.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.Value.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.Value.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.Value.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.Value.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.Value.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.Value.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -437,9 +475,13 @@ public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.Value.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.Value.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.Value.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -507,13 +549,12 @@ public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTim }; var actual = await QuerySql.GetMysqlFunctions(); AssertSingularEquals(expected, actual.Value); - } - - private static void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow expected, QuerySql.GetMysqlFunctionsRow actual) - { - Assert.That(actual.MaxInt, Is.EqualTo(expected.MaxInt)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } [Test] @@ -533,14 +574,13 @@ public async Task TestMySqlScopedSchemaEnum() }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); AssertSingularEquals(expected, actual.Value); - } - - private void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } } [Test] @@ -689,13 +729,18 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? 
cFloat, decima CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Value.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.Value.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.Value.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.Value.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.Value.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.Value.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.Value.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -714,11 +759,15 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Value.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.Value.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.Value.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.Value.CDatetime, Is.EqualTo(expected.CDatetime)); - Assert.That(actual.Value.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -741,14 +790,18 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CLongblob = cLongblob }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Value.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.Value.CBit, Is.EqualTo(expected.CBit)); - Assert.That(actual.Value.CBinary, Is.EqualTo(expected.CBinary)); - Assert.That(actual.Value.CVarbinary, Is.EqualTo(expected.CVarbinary)); - Assert.That(actual.Value.CTinyblob, Is.EqualTo(expected.CTinyblob)); - Assert.That(actual.Value.CBlob, Is.EqualTo(expected.CBlob)); - Assert.That(actual.Value.CMediumblob, Is.EqualTo(expected.CMediumblob)); - Assert.That(actual.Value.CLongblob, Is.EqualTo(expected.CLongblob)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBit, Is.EqualTo(y.CBit)); + Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); + Assert.That(x.CVarbinary, Is.EqualTo(y.CVarbinary)); + Assert.That(x.CTinyblob, Is.EqualTo(y.CTinyblob)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + Assert.That(x.CMediumblob, 
Is.EqualTo(y.CMediumblob)); + Assert.That(x.CLongblob, Is.EqualTo(y.CLongblob)); + } } [Test] diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 7acfb102..26a58db6 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -28,12 +28,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -58,21 +59,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < 
x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -110,6 +108,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -123,12 +134,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -155,19 +167,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + 
Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -211,31 +227,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void 
AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -259,21 +267,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -291,7 +298,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -334,10 +354,14 @@ public 
async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int c CBigint = cBigint }; var actual = await QuerySql.GetPostgresTypes(); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -445,13 +469,12 @@ public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, }; var actual = await QuerySql.GetPostgresFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow expected, QuerySql.GetPostgresFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } private static IEnumerable PostgresGuidDataTypesTestCases @@ -517,9 +540,8 @@ public async Task TestPostgresTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in 
NpgsqlExampleGen returns QuerySql.GetAuthorRow? (nullable record struct) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -527,8 +549,14 @@ public async Task TestPostgresTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -541,7 +569,7 @@ public async Task TestPostgresTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -712,6 +740,7 @@ public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPostgresArrayTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); } } @@ -883,9 +912,9 @@ public async Task TestPostgresXmlDataTypes(string cXml) 
AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) { - if (x.CXml == null && y.CXml == null) - return; - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); } } diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 0bf4630a..9ce57c51 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -28,12 +28,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -58,21 +59,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static 
void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -110,6 +108,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -123,12 +134,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -155,19 +167,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && 
SingularEquals(x.Author2.Value, y.Author2.Value); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Author2.Value.Id, Is.EqualTo(y.Author2.Value.Id)); + Assert.That(x.Author2.Value.Name, Is.EqualTo(y.Author2.Value.Name)); + Assert.That(x.Author2.Value.Bio, Is.EqualTo(y.Author2.Value.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -211,31 +227,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && SingularEquals(x.Book.Value, y.Book.Value); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - y = y.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + 
Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Book.Value.Id, Is.EqualTo(y.Book.Value.Id)); + Assert.That(x.Book.Value.AuthorId, Is.EqualTo(y.Book.Value.AuthorId)); + Assert.That(x.Book.Value.Name, Is.EqualTo(y.Book.Value.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -259,21 +267,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -291,7 +298,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + 
AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -334,10 +354,14 @@ public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int c CBigint = cBigint }; var actual = await QuerySql.GetPostgresTypes(); - Assert.That(actual.Value.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.Value.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.Value.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.Value.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -445,13 +469,12 @@ public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, }; var actual = await QuerySql.GetPostgresFunctions(); AssertSingularEquals(expected, actual.Value); - } - - private static void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow expected, QuerySql.GetPostgresFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } private static IEnumerable PostgresGuidDataTypesTestCases @@ -517,9 +540,8 @@ public async Task TestPostgresTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in NpgsqlExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -527,8 +549,14 @@ public async Task TestPostgresTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -541,7 +569,7 @@ public async Task TestPostgresTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -712,6 +740,7 @@ public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPostgresArrayTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); } } @@ -883,9 +912,9 @@ public async Task TestPostgresXmlDataTypes(string cXml) AssertSingularEquals(expected, actual.Value); 
void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) { - if (x.CXml == null && y.CXml == null) - return; - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); } } diff --git a/end2end/EndToEndTests/SqliteDapperTester.generated.cs b/end2end/EndToEndTests/SqliteDapperTester.generated.cs index fd786025..a42aa61b 100644 --- a/end2end/EndToEndTests/SqliteDapperTester.generated.cs +++ b/end2end/EndToEndTests/SqliteDapperTester.generated.cs @@ -23,12 +23,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -53,21 +54,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void 
AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -105,6 +103,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -118,12 +129,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -150,19 +162,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + 
AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -206,31 +222,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, 
Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -272,21 +280,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -304,7 +311,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void 
AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -323,14 +343,13 @@ public async Task TestSqliteTypes(int? cInteger, decimal? cReal, string cText, b }; var actual = await QuerySql.GetSqliteTypes(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesRow expected, QuerySql.GetSqliteTypesRow actual) - { - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); + void AssertSingularEquals(QuerySql.GetSqliteTypesRow x, QuerySql.GetSqliteTypesRow y) + { + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + } } [Test] @@ -344,12 +363,13 @@ public async Task TestGetAuthorByIdWithMultipleNamedParam() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthorByIdWithMultipleNamedParam(new QuerySql.GetAuthorByIdWithMultipleNamedParamArgs { IdArg = 1111, Take = 1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -366,13 +386,12 @@ public async Task 
TestSqliteDataTypesOverride(int? cInteger, decimal cReal, stri }; var actual = await QuerySql.GetSqliteFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow expected, QuerySql.GetSqliteFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxReal, Is.EqualTo(expected.MaxReal)); - Assert.That(actual.MaxText, Is.EqualTo(expected.MaxText)); + void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow x, QuerySql.GetSqliteFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxReal, Is.EqualTo(y.MaxReal)); + Assert.That(x.MaxText, Is.EqualTo(y.MaxText)); + } } [Test] @@ -392,14 +411,13 @@ public async Task TestCopyFrom(int batchSize, int? cInteger, decimal? cReal, str }; var actual = await QuerySql.GetSqliteTypesCnt(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow expected, QuerySql.GetSqliteTypesCntRow actual) - { - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); + void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow x, QuerySql.GetSqliteTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + } } [Test] @@ -410,9 +428,8 @@ public async Task TestSqliteTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in SqliteExampleGen returns 
QuerySql.GetAuthorRow? (nullable record struct/class) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); transaction.Commit(); var expected = new QuerySql.GetAuthorRow { @@ -420,8 +437,14 @@ public async Task TestSqliteTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -434,7 +457,7 @@ public async Task TestSqliteTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); transaction.Rollback(); var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } } } diff --git a/end2end/EndToEndTests/SqliteTester.generated.cs b/end2end/EndToEndTests/SqliteTester.generated.cs index cd1fc078..61f62be7 100644 --- a/end2end/EndToEndTests/SqliteTester.generated.cs +++ b/end2end/EndToEndTests/SqliteTester.generated.cs @@ -23,12 +23,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new 
QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -53,21 +54,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -105,6 +103,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + 
Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -118,12 +129,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -150,19 +162,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && SingularEquals(x.Author2.Value, y.Author2.Value); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Author2.Value.Id, Is.EqualTo(y.Author2.Value.Id)); + Assert.That(x.Author2.Value.Name, Is.EqualTo(y.Author2.Value.Name)); + Assert.That(x.Author2.Value.Bio, Is.EqualTo(y.Author2.Value.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + 
{ + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -206,31 +222,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author.Value, y.Author.Value) && SingularEquals(x.Book.Value, y.Book.Value); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - y = y.OrderBy(o => o.Author.Value.Name + o.Book.Value.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Value.Id, Is.EqualTo(y.Author.Value.Id)); + Assert.That(x.Author.Value.Name, Is.EqualTo(y.Author.Value.Name)); + Assert.That(x.Author.Value.Bio, Is.EqualTo(y.Author.Value.Bio)); + Assert.That(x.Book.Value.Id, Is.EqualTo(y.Book.Value.Id)); + Assert.That(x.Book.Value.AuthorId, Is.EqualTo(y.Book.Value.AuthorId)); + Assert.That(x.Book.Value.Name, Is.EqualTo(y.Book.Value.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -272,21 +280,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); 
- Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -304,7 +311,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -323,14 +343,13 @@ public async Task TestSqliteTypes(int? cInteger, decimal? 
cReal, string cText, b }; var actual = await QuerySql.GetSqliteTypes(); AssertSingularEquals(expected, actual.Value); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesRow expected, QuerySql.GetSqliteTypesRow actual) - { - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); + void AssertSingularEquals(QuerySql.GetSqliteTypesRow x, QuerySql.GetSqliteTypesRow y) + { + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + } } [Test] @@ -344,12 +363,13 @@ public async Task TestGetAuthorByIdWithMultipleNamedParam() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthorByIdWithMultipleNamedParam(new QuerySql.GetAuthorByIdWithMultipleNamedParamArgs { IdArg = 1111, Take = 1 }); - Assert.That(SingularEquals(expected, actual.Value)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -366,13 +386,12 @@ public async Task TestSqliteDataTypesOverride(int? 
cInteger, decimal cReal, stri }; var actual = await QuerySql.GetSqliteFunctions(); AssertSingularEquals(expected, actual.Value); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow expected, QuerySql.GetSqliteFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxReal, Is.EqualTo(expected.MaxReal)); - Assert.That(actual.MaxText, Is.EqualTo(expected.MaxText)); + void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow x, QuerySql.GetSqliteFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxReal, Is.EqualTo(y.MaxReal)); + Assert.That(x.MaxText, Is.EqualTo(y.MaxText)); + } } [Test] @@ -392,14 +411,13 @@ public async Task TestCopyFrom(int batchSize, int? cInteger, decimal? cReal, str }; var actual = await QuerySql.GetSqliteTypesCnt(); AssertSingularEquals(expected, actual.Value); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow expected, QuerySql.GetSqliteTypesCntRow actual) - { - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); + void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow x, QuerySql.GetSqliteTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + } } [Test] @@ -410,9 +428,8 @@ public async Task TestSqliteTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in SqliteExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct/class) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); transaction.Commit(); var expected = new QuerySql.GetAuthorRow { @@ -420,8 +437,14 @@ public async Task TestSqliteTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual.Value)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -434,7 +457,7 @@ public async Task TestSqliteTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); transaction.Rollback(); var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } } } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 02515ada..87e9c1f5 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -24,12 +24,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very 
famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -54,21 +55,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -106,6 +104,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + 
} + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -119,12 +130,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -151,19 +163,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for 
(int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -207,31 +223,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -273,21 +281,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o 
=> o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -305,7 +312,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -326,14 +346,18 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CLongtext = cLongtext }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CChar, Is.EqualTo(expected.CChar)); - Assert.That(actual.CNchar, Is.EqualTo(expected.CNchar)); - Assert.That(actual.CNationalChar, Is.EqualTo(expected.CNationalChar)); - Assert.That(actual.CVarchar, Is.EqualTo(expected.CVarchar)); - Assert.That(actual.CTinytext, Is.EqualTo(expected.CTinytext)); - Assert.That(actual.CMediumtext, 
Is.EqualTo(expected.CMediumtext)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CLongtext, Is.EqualTo(expected.CLongtext)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CChar, Is.EqualTo(y.CChar)); + Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); + Assert.That(x.CNationalChar, Is.EqualTo(y.CNationalChar)); + Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); + Assert.That(x.CTinytext, Is.EqualTo(y.CTinytext)); + Assert.That(x.CMediumtext, Is.EqualTo(y.CMediumtext)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CLongtext, Is.EqualTo(y.CLongtext)); + } } [Test] @@ -354,14 +378,18 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CBigint = cBigint }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CBool, Is.EqualTo(expected.CBool)); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CTinyint, Is.EqualTo(expected.CTinyint)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CMediumint, Is.EqualTo(expected.CMediumint)); - Assert.That(actual.CInt, Is.EqualTo(expected.CInt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CBool, Is.EqualTo(y.CBool)); + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CTinyint, Is.EqualTo(y.CTinyint)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CMediumint, Is.EqualTo(y.CMediumint)); + Assert.That(x.CInt, Is.EqualTo(y.CInt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -372,8 +400,8 @@ public async Task TestMySqlTransaction() var transaction = 
connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -381,8 +409,14 @@ public async Task TestMySqlTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -395,7 +429,7 @@ public async Task TestMySqlTransactionRollback() await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -415,13 +449,17 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? 
cNumeric, CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -437,9 +475,13 @@ public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -507,13 +549,12 @@ public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTim }; var actual = await QuerySql.GetMysqlFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow expected, QuerySql.GetMysqlFunctionsRow actual) - { - Assert.That(actual.MaxInt, Is.EqualTo(expected.MaxInt)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } [Test] @@ -533,14 +574,13 @@ public async Task TestMySqlScopedSchemaEnum() }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); AssertSingularEquals(expected, actual); - } - - private void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } } [Test] @@ -689,13 +729,18 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? 
cFloat, decima CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -714,11 +759,15 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CDatetime, Is.EqualTo(expected.CDatetime)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -741,14 +790,18 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CLongblob = cLongblob }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CBit, Is.EqualTo(expected.CBit)); - Assert.That(actual.CBinary, Is.EqualTo(expected.CBinary)); - Assert.That(actual.CVarbinary, Is.EqualTo(expected.CVarbinary)); - Assert.That(actual.CTinyblob, Is.EqualTo(expected.CTinyblob)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); - Assert.That(actual.CMediumblob, Is.EqualTo(expected.CMediumblob)); - Assert.That(actual.CLongblob, Is.EqualTo(expected.CLongblob)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBit, Is.EqualTo(y.CBit)); + Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); + Assert.That(x.CVarbinary, Is.EqualTo(y.CVarbinary)); + Assert.That(x.CTinyblob, Is.EqualTo(y.CTinyblob)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + Assert.That(x.CMediumblob, Is.EqualTo(y.CMediumblob)); + Assert.That(x.CLongblob, Is.EqualTo(y.CLongblob)); + } } [Test] diff --git 
a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index b47d9870..3f73f58a 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -24,12 +24,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -54,21 +55,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -106,6 +104,19 @@ 
public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -119,12 +130,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -151,19 +163,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, 
Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -207,31 +223,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i 
= 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -273,21 +281,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -305,7 +312,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -326,14 +346,18 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CLongtext = cLongtext }; var 
actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CChar, Is.EqualTo(expected.CChar)); - Assert.That(actual.CNchar, Is.EqualTo(expected.CNchar)); - Assert.That(actual.CNationalChar, Is.EqualTo(expected.CNationalChar)); - Assert.That(actual.CVarchar, Is.EqualTo(expected.CVarchar)); - Assert.That(actual.CTinytext, Is.EqualTo(expected.CTinytext)); - Assert.That(actual.CMediumtext, Is.EqualTo(expected.CMediumtext)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CLongtext, Is.EqualTo(expected.CLongtext)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CChar, Is.EqualTo(y.CChar)); + Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); + Assert.That(x.CNationalChar, Is.EqualTo(y.CNationalChar)); + Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); + Assert.That(x.CTinytext, Is.EqualTo(y.CTinytext)); + Assert.That(x.CMediumtext, Is.EqualTo(y.CMediumtext)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CLongtext, Is.EqualTo(y.CLongtext)); + } } [Test] @@ -354,14 +378,18 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? 
cTin CBigint = cBigint }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CBool, Is.EqualTo(expected.CBool)); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CTinyint, Is.EqualTo(expected.CTinyint)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CMediumint, Is.EqualTo(expected.CMediumint)); - Assert.That(actual.CInt, Is.EqualTo(expected.CInt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CBool, Is.EqualTo(y.CBool)); + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CTinyint, Is.EqualTo(y.CTinyint)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CMediumint, Is.EqualTo(y.CMediumint)); + Assert.That(x.CInt, Is.EqualTo(y.CInt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -372,8 +400,8 @@ public async Task TestMySqlTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -381,8 +409,14 @@ public async Task TestMySqlTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var 
actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -395,7 +429,7 @@ public async Task TestMySqlTransactionRollback() await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -415,13 +449,17 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? 
cNumeric, CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -437,9 +475,13 @@ public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypes(); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + { + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -507,13 +549,12 @@ public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTim }; var actual = await QuerySql.GetMysqlFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow expected, QuerySql.GetMysqlFunctionsRow actual) - { - Assert.That(actual.MaxInt, Is.EqualTo(expected.MaxInt)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } [Test] @@ -533,14 +574,13 @@ public async Task TestMySqlScopedSchemaEnum() }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); AssertSingularEquals(expected, actual); - } - - private void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } } [Test] @@ -689,13 +729,18 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? 
cFloat, decima CDoublePrecision = cDoublePrecision }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.CFloat, Is.EqualTo(expected.CFloat)); - Assert.That(actual.CNumeric, Is.EqualTo(expected.CNumeric)); - Assert.That(actual.CDecimal, Is.EqualTo(expected.CDecimal)); - Assert.That(actual.CDec, Is.EqualTo(expected.CDec)); - Assert.That(actual.CFixed, Is.EqualTo(expected.CFixed)); - Assert.That(actual.CDouble, Is.EqualTo(expected.CDouble)); - Assert.That(actual.CDoublePrecision, Is.EqualTo(expected.CDoublePrecision)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); + Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); + Assert.That(x.CDecimal, Is.EqualTo(y.CDecimal)); + Assert.That(x.CDec, Is.EqualTo(y.CDec)); + Assert.That(x.CFixed, Is.EqualTo(y.CFixed)); + Assert.That(x.CDouble, Is.EqualTo(y.CDouble)); + Assert.That(x.CDoublePrecision, Is.EqualTo(y.CDoublePrecision)); + } } [Test] @@ -714,11 +759,15 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CTimestamp = cTimestamp }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CYear, Is.EqualTo(expected.CYear)); - Assert.That(actual.CDate, Is.EqualTo(expected.CDate)); - Assert.That(actual.CDatetime, Is.EqualTo(expected.CDatetime)); - Assert.That(actual.CTimestamp, Is.EqualTo(expected.CTimestamp)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CYear, Is.EqualTo(y.CYear)); + Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); + Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + } } [Test] @@ -741,14 +790,18 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CLongblob = cLongblob }; var actual = await QuerySql.GetMysqlTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CBit, Is.EqualTo(expected.CBit)); - Assert.That(actual.CBinary, Is.EqualTo(expected.CBinary)); - Assert.That(actual.CVarbinary, Is.EqualTo(expected.CVarbinary)); - Assert.That(actual.CTinyblob, Is.EqualTo(expected.CTinyblob)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); - Assert.That(actual.CMediumblob, Is.EqualTo(expected.CMediumblob)); - Assert.That(actual.CLongblob, Is.EqualTo(expected.CLongblob)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBit, Is.EqualTo(y.CBit)); + Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); + Assert.That(x.CVarbinary, Is.EqualTo(y.CVarbinary)); + Assert.That(x.CTinyblob, Is.EqualTo(y.CTinyblob)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + Assert.That(x.CMediumblob, Is.EqualTo(y.CMediumblob)); + Assert.That(x.CLongblob, Is.EqualTo(y.CLongblob)); + } } [Test] diff --git 
a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index c96eab8b..28730b12 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -28,12 +28,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -58,21 +59,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -110,6 +108,19 @@ public async 
Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -123,12 +134,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -155,19 +167,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + 
Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -211,31 +227,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + 
AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -259,21 +267,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -291,7 +298,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -334,10 +354,14 @@ public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int c CBigint = cBigint }; var actual = await 
QuerySql.GetPostgresTypes(); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -445,13 +469,12 @@ public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, }; var actual = await QuerySql.GetPostgresFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow expected, QuerySql.GetPostgresFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } private static IEnumerable PostgresGuidDataTypesTestCases @@ -517,9 +540,8 @@ public async Task TestPostgresTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in NpgsqlExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -527,8 +549,14 @@ public async Task TestPostgresTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -541,7 +569,7 @@ public async Task TestPostgresTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -712,6 +740,7 @@ public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPostgresArrayTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); } } @@ -883,9 +912,9 @@ public async Task TestPostgresXmlDataTypes(string cXml) AssertSingularEquals(expected, actual); void 
AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) { - if (x.CXml == null && y.CXml == null) - return; - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); } } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 233f3dcd..02281d4c 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -28,12 +28,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -58,21 +59,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List 
x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -110,6 +108,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -123,12 +134,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -155,19 +167,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, 
actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -211,31 +227,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + 
Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -259,21 +267,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -291,7 +298,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, 
QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -334,10 +354,14 @@ public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int c CBigint = cBigint }; var actual = await QuerySql.GetPostgresTypes(); - Assert.That(actual.CBoolean, Is.EqualTo(expected.CBoolean)); - Assert.That(actual.CSmallint, Is.EqualTo(expected.CSmallint)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CBigint, Is.EqualTo(expected.CBigint)); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); + Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CBigint, Is.EqualTo(y.CBigint)); + } } [Test] @@ -445,13 +469,12 @@ public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, }; var actual = await QuerySql.GetPostgresFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow expected, QuerySql.GetPostgresFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxVarchar, Is.EqualTo(expected.MaxVarchar)); - Assert.That(actual.MaxTimestamp, Is.EqualTo(expected.MaxTimestamp)); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } } private static IEnumerable PostgresGuidDataTypesTestCases @@ -517,9 +540,8 @@ public async Task TestPostgresTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in NpgsqlExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); await transaction.CommitAsync(); var expected = new QuerySql.GetAuthorRow { @@ -527,8 +549,14 @@ public async Task TestPostgresTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -541,7 +569,7 @@ public async Task TestPostgresTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); await transaction.RollbackAsync(); var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } [Test] @@ -712,6 +740,7 @@ public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPostgresArrayTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); } } @@ -883,9 +912,9 @@ public async Task TestPostgresXmlDataTypes(string cXml) AssertSingularEquals(expected, actual); void 
AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) { - if (x.CXml == null && y.CXml == null) - return; - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); } } diff --git a/end2end/EndToEndTestsLegacy/SqliteDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/SqliteDapperTester.generated.cs index 166a6016..b71db001 100644 --- a/end2end/EndToEndTestsLegacy/SqliteDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/SqliteDapperTester.generated.cs @@ -23,12 +23,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -53,21 +54,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void 
AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -105,6 +103,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -118,12 +129,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -150,19 +162,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + 
AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -206,31 +222,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, 
Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -272,21 +280,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -304,7 +311,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void 
AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -323,14 +343,13 @@ public async Task TestSqliteTypes(int? cInteger, decimal? cReal, string cText, b }; var actual = await QuerySql.GetSqliteTypes(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesRow expected, QuerySql.GetSqliteTypesRow actual) - { - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); + void AssertSingularEquals(QuerySql.GetSqliteTypesRow x, QuerySql.GetSqliteTypesRow y) + { + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + } } [Test] @@ -344,12 +363,13 @@ public async Task TestGetAuthorByIdWithMultipleNamedParam() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthorByIdWithMultipleNamedParam(new QuerySql.GetAuthorByIdWithMultipleNamedParamArgs { IdArg = 1111, Take = 1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -366,13 +386,12 @@ public async Task 
TestSqliteDataTypesOverride(int? cInteger, decimal cReal, stri }; var actual = await QuerySql.GetSqliteFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow expected, QuerySql.GetSqliteFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxReal, Is.EqualTo(expected.MaxReal)); - Assert.That(actual.MaxText, Is.EqualTo(expected.MaxText)); + void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow x, QuerySql.GetSqliteFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxReal, Is.EqualTo(y.MaxReal)); + Assert.That(x.MaxText, Is.EqualTo(y.MaxText)); + } } [Test] @@ -392,14 +411,13 @@ public async Task TestCopyFrom(int batchSize, int? cInteger, decimal? cReal, str }; var actual = await QuerySql.GetSqliteTypesCnt(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow expected, QuerySql.GetSqliteTypesCntRow actual) - { - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); + void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow x, QuerySql.GetSqliteTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + } } [Test] @@ -410,9 +428,8 @@ public async Task TestSqliteTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in SqliteExampleGen returns 
QuerySql.GetAuthorRow? (nullable record struct/class) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); transaction.Commit(); var expected = new QuerySql.GetAuthorRow { @@ -420,8 +437,14 @@ public async Task TestSqliteTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -434,7 +457,7 @@ public async Task TestSqliteTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); transaction.Rollback(); var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } } } diff --git a/end2end/EndToEndTestsLegacy/SqliteTester.generated.cs b/end2end/EndToEndTestsLegacy/SqliteTester.generated.cs index 70ea0b2b..2e434715 100644 --- a/end2end/EndToEndTestsLegacy/SqliteTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/SqliteTester.generated.cs @@ -23,12 +23,13 @@ public async Task TestOne() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await 
this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -53,21 +54,18 @@ public async Task TestMany() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } - private static void AssertSequenceEquals(List x, List y) - { - Assert.That(x.Count, Is.EqualTo(y.Count)); - for (int i = 0; i < x.Count; i++) + void AssertSequenceEquals(List x, List y) { - AssertSingularEquals(x[i], y[i]); + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); } } @@ -105,6 +103,19 @@ public async Task TestExecRows() }; var actual = await this.QuerySql.ListAuthors(new QuerySql.ListAuthorsArgs { Limit = 2, Offset = 0 }); AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAuthorsRow x, QuerySql.ListAuthorsRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + + void AssertSequenceEquals(List 
x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -118,12 +129,13 @@ public async Task TestExecLastId() Bio = "Quote that everyone always attribute to Einstein" }; var actual = await QuerySql.GetAuthorById(new QuerySql.GetAuthorByIdArgs { Id = id1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdRow x, QuerySql.GetAuthorByIdRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -150,19 +162,23 @@ public async Task TestSelfJoinEmbed() } }; var actual = await QuerySql.GetDuplicateAuthors(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Author2, y.Author2); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetDuplicateAuthorsRow x, QuerySql.GetDuplicateAuthorsRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Author2.Id, Is.EqualTo(y.Author2.Id)); + Assert.That(x.Author2.Name, Is.EqualTo(y.Author2.Name)); + Assert.That(x.Author2.Bio, Is.EqualTo(y.Author2.Bio)); + } - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + 
AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -206,31 +222,23 @@ public async Task TestJoinEmbed() } }; var actual = await QuerySql.ListAllAuthorsBooks(); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) - { - return SingularEquals(x.Author, y.Author) && SingularEquals(x.Book, y.Book); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - y = y.OrderBy(o => o.Author.Name + o.Book.Name).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } - - private static bool SingularEquals(Author x, Author y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); - } + AssertSequenceEquals(expected, actual); + void AssertSingularEquals(QuerySql.ListAllAuthorsBooksRow x, QuerySql.ListAllAuthorsBooksRow y) + { + Assert.That(x.Author.Id, Is.EqualTo(y.Author.Id)); + Assert.That(x.Author.Name, Is.EqualTo(y.Author.Name)); + Assert.That(x.Author.Bio, Is.EqualTo(y.Author.Bio)); + Assert.That(x.Book.Id, Is.EqualTo(y.Book.Id)); + Assert.That(x.Book.AuthorId, Is.EqualTo(y.Book.AuthorId)); + Assert.That(x.Book.Name, Is.EqualTo(y.Book.Name)); + } - private static bool SingularEquals(Book x, Book y) - { - return x.Id.Equals(y.Id) && x.AuthorId.Equals(y.AuthorId) && x.Name.Equals(y.Name); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } } [Test] @@ -272,21 +280,20 @@ public async Task TestNargNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs()); - Assert.That(SequenceEquals(expected, actual)); - } - - private static bool SequenceEquals(List x, List y) - { - if (x.Count != y.Count) - return false; - x = x.OrderBy(o => o.Id).ToList(); - y = 
y.OrderBy(o => o.Id).ToList(); - return !x.Where((t, i) => !SingularEquals(t, y[i])).Any(); - } + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } - private static bool SingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -304,7 +311,20 @@ public async Task TestNargNotNull() } }; var actual = await this.QuerySql.GetAuthorByNamePattern(new QuerySql.GetAuthorByNamePatternArgs { NamePattern = "Bojack%" }); - Assert.That(SequenceEquals(expected, actual)); + AssertSequenceEquals(expected, actual); + void AssertSequenceEquals(List x, List y) + { + Assert.That(x.Count, Is.EqualTo(y.Count)); + for (int i = 0; i < x.Count; i++) + AssertSingularEquals(x[i], y[i]); + } + + void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuthorByNamePatternRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -323,14 +343,13 @@ public async Task TestSqliteTypes(int? cInteger, decimal? 
cReal, string cText, b }; var actual = await QuerySql.GetSqliteTypes(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesRow expected, QuerySql.GetSqliteTypesRow actual) - { - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); - Assert.That(actual.CBlob, Is.EqualTo(expected.CBlob)); + void AssertSingularEquals(QuerySql.GetSqliteTypesRow x, QuerySql.GetSqliteTypesRow y) + { + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + Assert.That(x.CBlob, Is.EqualTo(y.CBlob)); + } } [Test] @@ -344,12 +363,13 @@ public async Task TestGetAuthorByIdWithMultipleNamedParam() Bio = "Back in the 90s he was in a very famous TV show" }; var actual = await this.QuerySql.GetAuthorByIdWithMultipleNamedParam(new QuerySql.GetAuthorByIdWithMultipleNamedParamArgs { IdArg = 1111, Take = 1 }); - Assert.That(SingularEquals(expected, actual)); - } - - private static bool SingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) - { - return x.Id.Equals(y.Id) && x.Name.Equals(y.Name) && x.Bio.Equals(y.Bio); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorByIdWithMultipleNamedParamRow x, QuerySql.GetAuthorByIdWithMultipleNamedParamRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -366,13 +386,12 @@ public async Task TestSqliteDataTypesOverride(int? 
cInteger, decimal cReal, stri }; var actual = await QuerySql.GetSqliteFunctions(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow expected, QuerySql.GetSqliteFunctionsRow actual) - { - Assert.That(actual.MaxInteger, Is.EqualTo(expected.MaxInteger)); - Assert.That(actual.MaxReal, Is.EqualTo(expected.MaxReal)); - Assert.That(actual.MaxText, Is.EqualTo(expected.MaxText)); + void AssertSingularEquals(QuerySql.GetSqliteFunctionsRow x, QuerySql.GetSqliteFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxReal, Is.EqualTo(y.MaxReal)); + Assert.That(x.MaxText, Is.EqualTo(y.MaxText)); + } } [Test] @@ -392,14 +411,13 @@ public async Task TestCopyFrom(int batchSize, int? cInteger, decimal? cReal, str }; var actual = await QuerySql.GetSqliteTypesCnt(); AssertSingularEquals(expected, actual); - } - - private static void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow expected, QuerySql.GetSqliteTypesCntRow actual) - { - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); - Assert.That(actual.CInteger, Is.EqualTo(expected.CInteger)); - Assert.That(actual.CReal, Is.EqualTo(expected.CReal)); - Assert.That(actual.CText, Is.EqualTo(expected.CText)); + void AssertSingularEquals(QuerySql.GetSqliteTypesCntRow x, QuerySql.GetSqliteTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CInteger, Is.EqualTo(y.CInteger)); + Assert.That(x.CReal, Is.EqualTo(y.CReal)); + Assert.That(x.CText, Is.EqualTo(y.CText)); + } } [Test] @@ -410,9 +428,8 @@ public async Task TestSqliteTransaction() var transaction = connection.BeginTransaction(); var querySqlWithTx = QuerySql.WithTransaction(transaction); await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - // The GetAuthor method in SqliteExampleGen returns QuerySql.GetAuthorRow? 
(nullable record struct/class) - var actualNull = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actualNull == null, "there is author"); // This is correct for nullable types + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); transaction.Commit(); var expected = new QuerySql.GetAuthorRow { @@ -420,8 +437,14 @@ public async Task TestSqliteTransaction() Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }; - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(SingularEquals(expected, actual)); // Apply placeholder here + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } } [Test] @@ -434,7 +457,7 @@ public async Task TestSqliteTransactionRollback() await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); transaction.Rollback(); var actual = await this.QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - Assert.That(actual == null, "author should not exist after rollback"); + ClassicAssert.IsNull(actual); } } } From 3f8bd97490d2285d63975e45c2a67ef6c31f1093 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Wed, 13 Aug 2025 23:47:24 +0200 Subject: [PATCH 02/33] fix: change mysql set data type to map to HashSet instead of array --- CodeGenerator/Generators/EnumsGen.cs | 4 +- Drivers/DbDriver.cs | 207 +++++++++--------- Drivers/Generators/CommonGen.cs | 2 +- Drivers/MySqlConnectorDriver.cs | 86 ++++---- 
.../EndToEndScaffold/Templates/MySqlTests.cs | 49 ++++- .../MySqlConnectorDapperTester.generated.cs | 33 ++- .../MySqlConnectorTester.generated.cs | 33 ++- .../MySqlConnectorDapperTester.generated.cs | 33 ++- .../MySqlConnectorTester.generated.cs | 33 ++- .../MySqlConnectorDapperExample/Models.cs | 20 +- .../MySqlConnectorDapperExample/QuerySql.cs | 16 +- examples/MySqlConnectorDapperExample/Utils.cs | 29 +-- .../Models.cs | 20 +- .../QuerySql.cs | 14 +- .../Utils.cs | 29 +-- examples/MySqlConnectorExample/Models.cs | 20 +- examples/MySqlConnectorExample/QuerySql.cs | 28 +-- examples/MySqlConnectorExample/Utils.cs | 5 +- .../MySqlConnectorLegacyExample/Models.cs | 20 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 26 +-- examples/MySqlConnectorLegacyExample/Utils.cs | 5 +- 21 files changed, 405 insertions(+), 307 deletions(-) diff --git a/CodeGenerator/Generators/EnumsGen.cs b/CodeGenerator/Generators/EnumsGen.cs index f4649b85..777e4b75 100644 --- a/CodeGenerator/Generators/EnumsGen.cs +++ b/CodeGenerator/Generators/EnumsGen.cs @@ -37,9 +37,9 @@ public static class {{name}}Extensions return StringToEnum[me]; } - public static {{name}}[] To{{name}}Arr(this string me) + public static HashSet<{{name}}> To{{name}}Set(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet<{{name}}>(me.Split(',').ToList().Select(v => StringToEnum[v])); } } """)!; diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 1225266d..900192dc 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -200,6 +200,51 @@ public static void ConfigureSqlMapper() """)!]; } + public string GetColumnSchema(Column column) + { + return column.Table.Schema == DefaultSchema ? 
string.Empty : column.Table.Schema; + } + + public abstract string TransformQueryText(Query query); + + public abstract ConnectionGenCommands EstablishConnection(Query query); + + public abstract string CreateSqlCommand(string sqlTextConstant); + + /* Since there is no indication of the primary key column in SQLC protobuf (assuming it is a single column), + this method uses a few heuristics to assess the data type of the id column + */ + public string GetIdColumnType(Query query) + { + var tableColumns = Tables[query.InsertIntoTable.Schema][query.InsertIntoTable.Name].Columns; + var idColumn = tableColumns.First(c => c.Name.Equals("id", StringComparison.OrdinalIgnoreCase)); + if (idColumn is not null) + return GetCsharpType(idColumn, query); + + idColumn = tableColumns.First(c => c.Name.Contains("id", StringComparison.CurrentCultureIgnoreCase)); + return GetCsharpType(idColumn ?? tableColumns[0], query); + } + + public virtual string[] GetLastIdStatement(Query query) + { + var idColumnType = GetIdColumnType(query); + var convertFunc = ColumnMappings[idColumnType].ConvertFunc ?? 
+ throw new InvalidOperationException($"ConvertFunc is missing for id column type {idColumnType}"); + var convertFuncCall = convertFunc(Variable.Result.AsVarName()); + return + [ + $"var {Variable.Result.AsVarName()} = await {Variable.Command.AsVarName()}.ExecuteScalarAsync();", + $"return {convertFuncCall};" + ]; + } + + public Column GetColumnFromParam(Parameter queryParam, Query query) + { + if (string.IsNullOrEmpty(queryParam.Column.Name)) + queryParam.Column.Name = $"{GetCsharpType(queryParam.Column, query).Replace("[]", "Arr")}_{queryParam.Number}"; + return queryParam.Column; + } + protected bool TypeExistsInQueries(string csharpType) { return Queries.Any(q => TypeExistsInQuery(csharpType, q)); @@ -213,27 +258,61 @@ protected bool TypeExistsInQuery(string csharpType, Query query) .Any(p => csharpType == GetCsharpTypeWithoutNullableSuffix(p.Column, query)); } - public string AddNullableSuffixIfNeeded(string csharpType, bool notNull) + protected bool SliceQueryExists() { - if (notNull) - return csharpType; - return IsTypeNullable(csharpType) ? $"{csharpType}?" : csharpType; + return Queries.Any(q => q.Params.Any(p => p.Column.IsSqlcSlice)); + } + + protected bool CopyFromQueryExists() + { + return Queries.Any(q => q.Cmd is ":copyfrom"); + } + + public OverrideOption? FindOverrideForQueryColumn(Query? query, Column column) + { + if (query is null) + return null; + return Options.Overrides.FirstOrDefault(o => + o.Column == $"{query.Name}:{column.Name}" || o.Column == $"*:{column.Name}"); + } + + // If the column data type is overridden, we need to check for nulls in generated code + public bool IsColumnNotNull(Column column, Query? query) + { + if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) + return csharpType.NotNull; + return column.NotNull; } + /* Data type methods */ public string GetCsharpType(Column column, Query? 
query) { var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); return AddNullableSuffixIfNeeded(csharpType, IsColumnNotNull(column, query)); } - public string GetColumnSchema(Column column) + public string AddNullableSuffixIfNeeded(string csharpType, bool notNull) { - return column.Table.Schema == DefaultSchema ? string.Empty : column.Table.Schema; + if (notNull) + return csharpType; + return IsTypeNullable(csharpType) ? $"{csharpType}?" : csharpType; } - public virtual string GetEnumTypeAsCsharpType(Column column, Plugin.Enum enumType) + protected string? GetColumnDbTypeOverride(Column column) { - return column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); + var columnType = column.Type.Name.ToLower(); + foreach (var columnMapping in ColumnMappings.Values) + { + if (columnMapping.DbTypes.TryGetValue(columnType, out var dbTypeOverride)) + return dbTypeOverride.NpgsqlTypeOverride; + } + throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); + } + + public bool IsTypeNullable(string csharpType) + { + if (NullableTypes.Contains(csharpType.Replace("?", ""))) return true; + return Options.DotnetFramework.IsDotnetCore(); // non-primitives in .Net Core are inherently nullable } public string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) @@ -245,7 +324,7 @@ public string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) return "object"; if (GetEnumType(column) is { } enumType) - return GetEnumTypeAsCsharpType(column, enumType); + return EnumToCsharpTypeName(column, enumType); if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) return csharpType.Type; @@ -260,16 +339,6 @@ public string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); } - public Plugin.Enum? 
GetEnumType(Column column) - { - if (column.Table is null) - return null; - var schemaName = GetColumnSchema(column); - if (!Enums.TryGetValue(schemaName, value: out var enumsInSchema)) - return null; - return enumsInSchema.GetValueOrDefault(column.Type.Name); - } - private static bool DoesColumnMappingApply(ColumnMapping columnMapping, Column column) { var columnType = column.Type.Name.ToLower(); @@ -280,6 +349,7 @@ private static bool DoesColumnMappingApply(ColumnMapping columnMapping, Column c return typeInfo.Length.Value == column.Length; } + /* Column reader methods */ private string GetColumnReader(CsharpTypeOption csharpTypeOption, int ordinal) { if (ColumnMappings.TryGetValue(csharpTypeOption.Type, out var value)) @@ -290,10 +360,10 @@ private string GetColumnReader(CsharpTypeOption csharpTypeOption, int ordinal) private string GetEnumReader(Column column, int ordinal, Plugin.Enum enumType) { var enumName = column.Type.Name.ToModelName(column.Table.Schema, DefaultSchema); - var fullEnumType = GetEnumTypeAsCsharpType(column, enumType); + var fullEnumType = EnumToCsharpTypeName(column, enumType); var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; - if (fullEnumType.EndsWith("[]")) - return $"{readStmt}.To{enumName}Arr()"; + if (fullEnumType.StartsWith("HashSet")) + return $"{readStmt}.To{enumName}Set()"; return $"{readStmt}.To{enumName}()"; } @@ -315,94 +385,19 @@ public string GetColumnReader(Column column, int ordinal, Query? query) throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); } - protected string? 
GetColumnDbTypeOverride(Column column) - { - var columnType = column.Type.Name.ToLower(); - foreach (var columnMapping in ColumnMappings.Values) - { - if (columnMapping.DbTypes.TryGetValue(columnType, out var dbTypeOverride)) - return dbTypeOverride.NpgsqlTypeOverride; - } - throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); - } - - public abstract string TransformQueryText(Query query); - - public abstract ConnectionGenCommands EstablishConnection(Query query); - - public abstract string CreateSqlCommand(string sqlTextConstant); - - public bool IsTypeNullable(string csharpType) - { - if (NullableTypes.Contains(csharpType.Replace("?", ""))) return true; - return Options.DotnetFramework.IsDotnetCore(); // non-primitives in .Net Core are inherently nullable - } - - /// - /// Since there is no indication of the primary key column in SQLC protobuf (assuming it is a single column), - /// this method uses a few heuristics to assess the data type of the id column - /// - /// - /// The data type of the id column - public string GetIdColumnType(Query query) - { - var tableColumns = Tables[query.InsertIntoTable.Schema][query.InsertIntoTable.Name].Columns; - var idColumn = tableColumns.First(c => c.Name.Equals("id", StringComparison.OrdinalIgnoreCase)); - if (idColumn is not null) - return GetCsharpType(idColumn, query); - - idColumn = tableColumns.First(c => c.Name.Contains("id", StringComparison.CurrentCultureIgnoreCase)); - return GetCsharpType(idColumn ?? tableColumns[0], query); - } - - public virtual string[] GetLastIdStatement(Query query) - { - var idColumnType = GetIdColumnType(query); - var convertFunc = ColumnMappings[idColumnType].ConvertFunc ?? 
- throw new InvalidOperationException($"ConvertFunc is missing for id column type {idColumnType}"); - var convertFuncCall = convertFunc(Variable.Result.AsVarName()); - return - [ - $"var {Variable.Result.AsVarName()} = await {Variable.Command.AsVarName()}.ExecuteScalarAsync();", - $"return {convertFuncCall};" - ]; - } - - public Column GetColumnFromParam(Parameter queryParam, Query query) - { - if (string.IsNullOrEmpty(queryParam.Column.Name)) - queryParam.Column.Name = $"{GetCsharpType(queryParam.Column, query).Replace("[]", "Arr")}_{queryParam.Number}"; - return queryParam.Column; - } - - protected bool SliceQueryExists() - { - return Queries.Any(q => q.Params.Any(p => p.Column.IsSqlcSlice)); - } - - protected bool CopyFromQueryExists() - { - return Queries.Any(q => q.Cmd is ":copyfrom"); - } - - public OverrideOption? FindOverrideForQueryColumn(Query? query, Column column) + /* Enum methods*/ + public Plugin.Enum? GetEnumType(Column column) { - if (query is null) + if (column.Table is null) return null; - return Options.Overrides.FirstOrDefault(o => - o.Column == $"{query.Name}:{column.Name}" || o.Column == $"*:{column.Name}"); + var schemaName = GetColumnSchema(column); + if (!Enums.TryGetValue(schemaName, value: out var enumsInSchema)) + return null; + return enumsInSchema.GetValueOrDefault(column.Type.Name); } - /// - /// If the column data type is overridden, we need to check for nulls in generated code - /// - /// - /// - /// Adjusted not null value - public bool IsColumnNotNull(Column column, Query? 
query) + public virtual string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) { - if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) - return csharpType.NotNull; - return column.NotNull; + return column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); } } \ No newline at end of file diff --git a/Drivers/Generators/CommonGen.cs b/Drivers/Generators/CommonGen.cs index 4a4a8892..16283e04 100644 --- a/Drivers/Generators/CommonGen.cs +++ b/Drivers/Generators/CommonGen.cs @@ -22,7 +22,7 @@ public static string GetMethodParameterList(string argInterface, IEnumerable { var stringJoinStmt = $"string.Join(\",\", {el})"; diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 4d9afabf..14ca92cc 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -18,11 +18,6 @@ public partial class MySqlConnectorDriver( IList queries) : DbDriver(options, defaultSchema, tables, enums, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { - public const string NullToStringCsvConverter = "NullToStringCsvConverter"; - public const string BoolToBitCsvConverter = "BoolToBitCsvConverter"; - public const string ByteCsvConverter = "ByteCsvConverter"; - public const string ByteArrayCsvConverter = "ByteArrayCsvConverter"; - public override Dictionary ColumnMappings { get; } = new() { @@ -158,16 +153,16 @@ public partial class MySqlConnectorDriver( private readonly Func _setTypeHandlerFunc = x => $$""" - private class {{x}}TypeHandler : SqlMapper.TypeHandler<{{x}}[]> + private class {{x}}TypeHandler : SqlMapper.TypeHandler> { - public override {{x}}[] Parse(object value) + public override HashSet<{{x}}> Parse(object value) { if (value is string s) - return s.To{{x}}Arr(); - throw new DataException($"Cannot convert {value?.GetType()} to {{x}}[]"); + return s.To{{x}}Set(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet<{{x}}>"); } - public override void 
SetValue(IDbDataParameter parameter, {{x}}[] value) + public override void SetValue(IDbDataParameter parameter, HashSet<{{x}}> value) { parameter.Value = string.Join(",", value); } @@ -219,24 +214,23 @@ public override ISet GetUsingDirectivesForUtils() "CsvHelper.Configuration" ], CopyFromQueryExists() + ) + .AddRangeExcludeNulls( + [ + "System.Collections.Generic" + ] ); } - private bool IsSetType(Column column) - { - var enumType = GetEnumType(column); - return enumType is not null && IsEnumOfTypeSet(column, enumType); - } - protected override ISet GetConfigureSqlMappings() { var setSqlMappings = Queries .SelectMany(q => q.Columns) - .Where(IsSetType) + .Where(IsSetDataType) .Select(c => { var enumName = c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema); - return $"SqlMapper.AddTypeHandler(typeof({enumName}[]), new {enumName}TypeHandler());"; + return $"SqlMapper.AddTypeHandler(typeof(HashSet<{enumName}>), new {enumName}TypeHandler());"; }) .Distinct(); @@ -252,7 +246,7 @@ private MemberDeclarationSyntax[] GetSetTypeHandlers() .Where(c => { var enumType = GetEnumType(c); - return enumType is not null && IsEnumOfTypeSet(c, enumType); + return enumType is not null && IsSetDataType(c, enumType); }) .Select(c => _setTypeHandlerFunc(c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema))) .Distinct() @@ -276,7 +270,7 @@ public override MemberDeclarationSyntax[] GetMemberDeclarationsForUtils() continue; foreach (var p in query.Params) { - if (!IsSetType(p.Column)) + if (!IsSetDataType(p.Column)) continue; var enumName = p.Column.Type.Name.ToModelName(GetColumnSchema(p.Column), DefaultSchema); memberDeclarations = memberDeclarations.AddRangeExcludeNulls([ParseMemberDeclaration(SetCsvConverterFunc(enumName))!]); @@ -359,8 +353,8 @@ public class {{x}}CsvConverter : DefaultTypeConverter { if (value == null) return @"\N"; - if (value is {{x}}[] arrVal) - return string.Join(",", arrVal); + if (value is HashSet<{{x}}> setVal) + return string.Join(",", setVal); 
return base.ConvertToString(value, row, memberMapData); } } @@ -409,6 +403,17 @@ public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argI return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); } + public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) + { + return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + /* :execlastid methods */ public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) { return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); @@ -423,15 +428,11 @@ public override string[] GetLastIdStatement(Query query) ]; } - public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) - { - return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } + /* :copyfrom methods */ + public const string NullToStringCsvConverter = "NullToStringCsvConverter"; + public const string BoolToBitCsvConverter = "BoolToBitCsvConverter"; + public const string ByteCsvConverter = "ByteCsvConverter"; + public const string ByteArrayCsvConverter = "ByteArrayCsvConverter"; public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) { @@ -508,7 +509,7 @@ private ISet GetCsvNullConverters(Query query) var csharpType = GetCsharpTypeWithoutNullableSuffix(p.Column, 
query); if ( !BoolAndByteTypes.Contains(csharpType) && - !IsSetType(p.Column) && + !IsSetDataType(p.Column) && TypeExistsInQuery(csharpType, query)) { var nullableCsharpType = AddNullableSuffixIfNeeded(csharpType, false); @@ -551,25 +552,32 @@ private ISet GetSetConverters(Query query) var converters = new HashSet(); foreach (var p in query.Params) { - if (!IsSetType(p.Column)) + if (!IsSetDataType(p.Column)) continue; var enumName = p.Column.Type.Name.ToModelName(GetColumnSchema(p.Column), DefaultSchema); var csvWriterVar = Variable.CsvWriter.AsVarName(); - converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"{enumName}[]", true)}>(new Utils.{enumName}CsvConverter());"); - converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"{enumName}[]", false)}>(new Utils.{enumName}CsvConverter());"); + converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"HashSet<{enumName}>", true)}>(new Utils.{enumName}CsvConverter());"); + converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"HashSet<{enumName}>", false)}>(new Utils.{enumName}CsvConverter());"); } return converters; } - private static bool IsEnumOfTypeSet(Column column, Plugin.Enum enumType) + /* Enum methods */ + public override string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) + { + var enumName = column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); + return IsSetDataType(column, enumType) ? 
$"HashSet<{enumName}>" : enumName; + } + + private static bool IsSetDataType(Column column, Plugin.Enum enumType) { return column.Length > enumType.Vals.Select(v => v.Length).Sum(); } - public override string GetEnumTypeAsCsharpType(Column column, Plugin.Enum enumType) + private bool IsSetDataType(Column column) { - var enumName = column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); - return IsEnumOfTypeSet(column, enumType) ? $"{enumName}[]" : enumName; + var enumType = GetEnumType(column); + return enumType is not null && IsSetDataType(column, enumType); } } \ No newline at end of file diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index dd061ae2..b9c86511 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -265,12 +265,27 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [KnownTestType.MySqlEnumDataType] = new TestImpl { Impl = $$""" + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData( + MysqlTypesCEnum.Medium, + new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee } + ).SetName("Valid Enum values"); + + yield return new TestCaseData( + null, + null + ).SetName("Enum with null values"); + } + } + [Test] - [TestCase(MysqlTypesCEnum.Medium, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(null, null)] + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] public async Task TestMySqlStringTypes( MysqlTypesCEnum? 
cEnum, - MysqlTypesCSet[] cSet) + HashSet cSet) { await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { @@ -637,14 +652,30 @@ public async Task TestMySqlTransactionRollback() [KnownTestType.MySqlEnumCopyFrom] = new TestImpl { Impl = $$""" + private static IEnumerable MySqlEnumCopyFromTestCases + { + get + { + yield return new TestCaseData( + 100, + MysqlTypesCEnum.Big, + new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee } + ).SetName("Valid Enum values"); + + yield return new TestCaseData( + 10, + null, + null + ).SetName("Enum with null values"); + } + } + [Test] - [TestCase(100, MysqlTypesCEnum.Big, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(500, MysqlTypesCEnum.Small, new[] { MysqlTypesCSet.Milk })] - [TestCase(10, null, null)] + [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] public async Task TestCopyFrom( int batchSize, MysqlTypesCEnum? cEnum, - MysqlTypesCSet[] cSet) + HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize) .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs @@ -683,14 +714,14 @@ await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs AuthorName = {{Consts.BojackAuthor}}, Name = {{Consts.BojackBookTitle}}, BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = {{Consts.BojackAuthor}}, Name = {{Consts.BojackBookTitle}}, BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs diff --git 
a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index 0f96ece5..9035f675 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -515,10 +515,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } } + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(MysqlTypesCEnum.Medium, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(null, null)] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] + public async Task TestMySqlStringTypes(MysqlTypesCEnum? 
cEnum, HashSet cSet) { await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); var expected = new QuerySql.GetMysqlTypesRow @@ -560,13 +568,13 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator @@ -804,11 +812,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } } + private static IEnumerable MySqlEnumCopyFromTestCases + { + get + { + yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(10, null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(100, MysqlTypesCEnum.Big, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(500, MysqlTypesCEnum.Small, new[] { MysqlTypesCSet.Milk })] - [TestCase(10, null, null)] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] + public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? cEnum, HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); await QuerySql.InsertMysqlTypesBatch(batchArgs); diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 7a645979..891c0b9b 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -515,10 +515,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } } + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(MysqlTypesCEnum.Medium, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(null, null)] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] + public async Task TestMySqlStringTypes(MysqlTypesCEnum? 
cEnum, HashSet cSet) { await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); var expected = new QuerySql.GetMysqlTypesRow @@ -560,13 +568,13 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator @@ -804,11 +812,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } } + private static IEnumerable MySqlEnumCopyFromTestCases + { + get + { + yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(10, null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(100, MysqlTypesCEnum.Big, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(500, MysqlTypesCEnum.Small, new[] { MysqlTypesCSet.Milk })] - [TestCase(10, null, null)] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] + public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? cEnum, HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); await QuerySql.InsertMysqlTypesBatch(batchArgs); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 87e9c1f5..ffc0f636 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -515,10 +515,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } } + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(MysqlTypesCEnum.Medium, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(null, null)] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] + public async Task TestMySqlStringTypes(MysqlTypesCEnum? 
cEnum, HashSet cSet) { await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); var expected = new QuerySql.GetMysqlTypesRow @@ -560,13 +568,13 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator @@ -804,11 +812,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } } + private static IEnumerable MySqlEnumCopyFromTestCases + { + get + { + yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(10, null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(100, MysqlTypesCEnum.Big, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(500, MysqlTypesCEnum.Small, new[] { MysqlTypesCSet.Milk })] - [TestCase(10, null, null)] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] + public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? cEnum, HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); await QuerySql.InsertMysqlTypesBatch(batchArgs); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index 3f73f58a..c49bcd1f 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -515,10 +515,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } } + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(MysqlTypesCEnum.Medium, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(null, null)] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] + public async Task TestMySqlStringTypes(MysqlTypesCEnum? 
cEnum, HashSet cSet) { await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); var expected = new QuerySql.GetMysqlTypesRow @@ -560,13 +568,13 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new ExtendedBiosAuthorType[] { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, - AuthorType = new ExtendedBiosAuthorType[] + AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator @@ -804,11 +812,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } } + private static IEnumerable MySqlEnumCopyFromTestCases + { + get + { + yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(10, null, null).SetName("Enum with null values"); + } + } + [Test] - [TestCase(100, MysqlTypesCEnum.Big, new[] { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee })] - [TestCase(500, MysqlTypesCEnum.Small, new[] { MysqlTypesCSet.Milk })] - [TestCase(10, null, null)] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, MysqlTypesCSet[] cSet) + [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] + public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? cEnum, HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); await QuerySql.InsertMysqlTypesBatch(batchArgs); diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index 299063de..db9ff9a9 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -51,7 +51,7 @@ public class MysqlType public JsonElement? CJson { get; init; } public JsonElement? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } - public MysqlTypesCSet[]? CSet { get; init; } + public HashSet? CSet { get; init; } public byte? CBit { get; init; } public byte[]? CBinary { get; init; } public byte[]? CVarbinary { get; init; } @@ -65,7 +65,7 @@ public class ExtendedBio public string? AuthorName { get; init; } public string? Name { get; init; } public ExtendedBiosBioType? BioType { get; init; } - public ExtendedBiosAuthorType[]? AuthorType { get; init; } + public HashSet? 
AuthorType { get; init; } }; public enum MysqlTypesCEnum { @@ -89,9 +89,9 @@ public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) return StringToEnum[me]; } - public static MysqlTypesCEnum[] ToMysqlTypesCEnumArr(this string me) + public static HashSet ToMysqlTypesCEnumSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -117,9 +117,9 @@ public static MysqlTypesCSet ToMysqlTypesCSet(this string me) return StringToEnum[me]; } - public static MysqlTypesCSet[] ToMysqlTypesCSetArr(this string me) + public static HashSet ToMysqlTypesCSetSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -145,9 +145,9 @@ public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) return StringToEnum[me]; } - public static ExtendedBiosBioType[] ToExtendedBiosBioTypeArr(this string me) + public static HashSet ToExtendedBiosBioTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -173,8 +173,8 @@ public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) return StringToEnum[me]; } - public static ExtendedBiosAuthorType[] ToExtendedBiosAuthorTypeArr(this string me) + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index bbf56feb..9adde76a 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -558,7 +558,7 @@ 
public class InsertMysqlTypesArgs public JsonElement? CJson { get; init; } public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } - public MysqlTypesCSet[]? CSet { get; init; } + public HashSet? CSet { get; init; } public short? CYear { get; init; } public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } @@ -658,7 +658,7 @@ public class InsertMysqlTypesBatchArgs public JsonElement? CJson { get; init; } public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } - public MysqlTypesCSet[]? CSet { get; init; } + public HashSet? CSet { get; init; } public short? CYear { get; init; } public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } @@ -697,8 +697,8 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -765,7 +765,7 @@ public class GetMysqlTypesRow public JsonElement? CJson { get; init; } public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } - public MysqlTypesCSet[]? CSet { get; init; } + public HashSet? CSet { get; init; } public byte? CBit { get; init; } public byte[]? 
CBinary { get; init; } public byte[]? CVarbinary { get; init; } @@ -824,7 +824,7 @@ public class GetMysqlTypesCntRow public JsonElement? CJson { get; init; } public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } - public MysqlTypesCSet[]? CSet { get; init; } + public HashSet? CSet { get; init; } public short? CYear { get; init; } public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } @@ -908,7 +908,7 @@ public class CreateExtendedBioArgs public string? AuthorName { get; init; } public string? Name { get; init; } public ExtendedBiosBioType? BioType { get; init; } - public ExtendedBiosAuthorType[]? AuthorType { get; init; } + public HashSet? AuthorType { get; init; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -941,7 +941,7 @@ public class GetFirstExtendedBioByTypeRow public string? AuthorName { get; init; } public string? Name { get; init; } public ExtendedBiosBioType? BioType { get; init; } - public ExtendedBiosAuthorType[]? AuthorType { get; init; } + public HashSet? 
AuthorType { get; init; } }; public class GetFirstExtendedBioByTypeArgs { diff --git a/examples/MySqlConnectorDapperExample/Utils.cs b/examples/MySqlConnectorDapperExample/Utils.cs index b2b76e0f..5d7b107b 100644 --- a/examples/MySqlConnectorDapperExample/Utils.cs +++ b/examples/MySqlConnectorDapperExample/Utils.cs @@ -3,6 +3,7 @@ using CsvHelper.Configuration; using CsvHelper.TypeConversion; using Dapper; +using System.Collections.Generic; using System.Data; using System.Linq; using System.Text.Json; @@ -28,8 +29,8 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(MysqlTypesCSet[]), new MysqlTypesCSetTypeHandler()); - SqlMapper.AddTypeHandler(typeof(ExtendedBiosAuthorType[]), new ExtendedBiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlTypesCSetTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -38,31 +39,31 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler + private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override MysqlTypesCSet[] Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlTypesCSetArr(); - throw new DataException($"Cannot convert {value?.GetType()} to MysqlTypesCSet[]"); + return s.ToMysqlTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, MysqlTypesCSet[] value) + public override void 
SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } } - private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler + private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override ExtendedBiosAuthorType[] Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToExtendedBiosAuthorTypeArr(); - throw new DataException($"Cannot convert {value?.GetType()} to ExtendedBiosAuthorType[]"); + return s.ToExtendedBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, ExtendedBiosAuthorType[] value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } @@ -74,8 +75,8 @@ public class MysqlTypesCSetCsvConverter : DefaultTypeConverter { if (value == null) return @"\N"; - if (value is MysqlTypesCSet[] arrVal) - return string.Join(",", arrVal); + if (value is HashSet setVal) + return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } } diff --git a/examples/MySqlConnectorDapperLegacyExample/Models.cs b/examples/MySqlConnectorDapperLegacyExample/Models.cs index 5ec196f7..52876531 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Models.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Models.cs @@ -52,7 +52,7 @@ public class MysqlType public JsonElement? CJson { get; set; } public JsonElement? CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public byte? CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } @@ -66,7 +66,7 @@ public class ExtendedBio public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? 
BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public enum MysqlTypesCEnum { @@ -90,9 +90,9 @@ public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) return StringToEnum[me]; } - public static MysqlTypesCEnum[] ToMysqlTypesCEnumArr(this string me) + public static HashSet ToMysqlTypesCEnumSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -118,9 +118,9 @@ public static MysqlTypesCSet ToMysqlTypesCSet(this string me) return StringToEnum[me]; } - public static MysqlTypesCSet[] ToMysqlTypesCSetArr(this string me) + public static HashSet ToMysqlTypesCSetSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -146,9 +146,9 @@ public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) return StringToEnum[me]; } - public static ExtendedBiosBioType[] ToExtendedBiosBioTypeArr(this string me) + public static HashSet ToExtendedBiosBioTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -174,9 +174,9 @@ public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) return StringToEnum[me]; } - public static ExtendedBiosAuthorType[] ToExtendedBiosAuthorTypeArr(this string me) + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 80f6df60..f53135e9 100644 --- 
a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -559,7 +559,7 @@ public class InsertMysqlTypesArgs public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -659,7 +659,7 @@ public class InsertMysqlTypesBatchArgs public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -697,7 +697,7 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -764,7 +764,7 @@ public class GetMysqlTypesRow public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public byte? 
CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } @@ -823,7 +823,7 @@ public class GetMysqlTypesCntRow public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -907,7 +907,7 @@ public class CreateExtendedBioArgs public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -940,7 +940,7 @@ public class GetFirstExtendedBioByTypeRow public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public class GetFirstExtendedBioByTypeArgs { diff --git a/examples/MySqlConnectorDapperLegacyExample/Utils.cs b/examples/MySqlConnectorDapperLegacyExample/Utils.cs index 0bc1ed4a..04937233 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Utils.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Utils.cs @@ -5,6 +5,7 @@ namespace MySqlConnectorDapperLegacyExampleGen using CsvHelper.Configuration; using CsvHelper.TypeConversion; using Dapper; + using System.Collections.Generic; using System.Data; using System.Linq; using System.Text.Json; @@ -29,8 +30,8 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(MysqlTypesCSet[]), new MysqlTypesCSetTypeHandler()); - 
SqlMapper.AddTypeHandler(typeof(ExtendedBiosAuthorType[]), new ExtendedBiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlTypesCSetTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -39,31 +40,31 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler + private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override MysqlTypesCSet[] Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlTypesCSetArr(); - throw new DataException($"Cannot convert {value?.GetType()} to MysqlTypesCSet[]"); + return s.ToMysqlTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, MysqlTypesCSet[] value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } } - private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler + private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override ExtendedBiosAuthorType[] Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToExtendedBiosAuthorTypeArr(); - throw new DataException($"Cannot convert {value?.GetType()} to ExtendedBiosAuthorType[]"); + return s.ToExtendedBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, ExtendedBiosAuthorType[] value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = 
string.Join(",", value); } @@ -75,8 +76,8 @@ public override string ConvertToString(object value, IWriterRow row, MemberMapDa { if (value == null) return @"\N"; - if (value is MysqlTypesCSet[] arrVal) - return string.Join(",", arrVal); + if (value is HashSet setVal) + return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } } diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index 9a1e896c..d50babf0 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -7,8 +7,8 @@ namespace MySqlConnectorExampleGen; public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); -public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, MysqlTypesCSet[]? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); -public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, ExtendedBiosAuthorType[]? AuthorType); +public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? 
CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); +public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); public enum MysqlTypesCEnum { Invalid = 0, // reserved for invalid enum value @@ -31,9 +31,9 @@ public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) return StringToEnum[me]; } - public static MysqlTypesCEnum[] ToMysqlTypesCEnumArr(this string me) + public static HashSet ToMysqlTypesCEnumSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -59,9 +59,9 @@ public static MysqlTypesCSet ToMysqlTypesCSet(this string me) return StringToEnum[me]; } - public static MysqlTypesCSet[] ToMysqlTypesCSetArr(this string me) + public static HashSet ToMysqlTypesCSetSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -87,9 +87,9 @@ public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) return StringToEnum[me]; } - public static ExtendedBiosBioType[] ToExtendedBiosBioTypeArr(this string me) + public static HashSet ToExtendedBiosBioTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -115,8 +115,8 @@ public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) return StringToEnum[me]; } 
- public static ExtendedBiosAuthorType[] ToExtendedBiosAuthorTypeArr(this string me) + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index 378f5ff2..a15daeb9 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -655,7 +655,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; - public readonly record struct InsertMysqlTypesArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? 
CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, MysqlTypesCSet[]? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public readonly record struct InsertMysqlTypesArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { if (this.Transaction == null) @@ -761,7 +761,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) } } - public readonly record struct InsertMysqlTypesBatchArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, MysqlTypesCSet[]? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? 
CMediumblob, byte[]? CLongblob); + public readonly record struct InsertMysqlTypesBatchArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public async Task InsertMysqlTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; @@ -789,8 +789,8 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -824,7 +824,7 @@ public async Task InsertMysqlTypesBatch(List args) } private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, 
c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; - public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, MysqlTypesCSet[]? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); public async Task GetMysqlTypes() { if (this.Transaction == null) @@ -871,7 +871,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), CBit = reader.IsDBNull(32) ? null : reader.GetFieldValue(32), CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), @@ -934,7 +934,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), CBit = reader.IsDBNull(32) ? null : reader.GetFieldValue(32), CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), CVarbinary = reader.IsDBNull(34) ? 
null : reader.GetFieldValue(34), @@ -951,7 +951,7 @@ public async Task InsertMysqlTypesBatch(List args) } private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; - public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, byte? CBit, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, MysqlTypesCSet[]? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, byte? CBit, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? 
CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public async Task GetMysqlTypesCnt() { if (this.Transaction == null) @@ -995,7 +995,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(25) ? null : JsonSerializer.Deserialize(reader.GetString(25)), CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), CEnum = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), CYear = reader.IsDBNull(29) ? null : reader.GetInt16(29), CDate = reader.IsDBNull(30) ? null : reader.GetDateTime(30), CDatetime = reader.IsDBNull(31) ? null : reader.GetDateTime(31), @@ -1058,7 +1058,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(25) ? null : JsonSerializer.Deserialize(reader.GetString(25)), CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), CEnum = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), CYear = reader.IsDBNull(29) ? null : reader.GetInt16(29), CDate = reader.IsDBNull(30) ? null : reader.GetDateTime(30), CDatetime = reader.IsDBNull(31) ? 
null : reader.GetDateTime(31), @@ -1163,7 +1163,7 @@ public async Task TruncateMysqlTypes() } private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, ExtendedBiosAuthorType[]? AuthorType); + public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); public async Task CreateExtendedBio(CreateExtendedBioArgs args) { if (this.Transaction == null) @@ -1202,7 +1202,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, ExtendedBiosAuthorType[]? AuthorType); + public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBiosBioType? BioType); public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { @@ -1223,7 +1223,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeArr() + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() }; } } @@ -1252,7 +1252,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) AuthorName = reader.IsDBNull(0) ? 
null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeArr() + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() }; } } diff --git a/examples/MySqlConnectorExample/Utils.cs b/examples/MySqlConnectorExample/Utils.cs index 741d29f7..16c3cd16 100644 --- a/examples/MySqlConnectorExample/Utils.cs +++ b/examples/MySqlConnectorExample/Utils.cs @@ -2,6 +2,7 @@ using CsvHelper; using CsvHelper.Configuration; using CsvHelper.TypeConversion; +using System.Collections.Generic; using System.Linq; namespace MySqlConnectorExampleGen; @@ -19,8 +20,8 @@ public class MysqlTypesCSetCsvConverter : DefaultTypeConverter { if (value == null) return @"\N"; - if (value is MysqlTypesCSet[] arrVal) - return string.Join(",", arrVal); + if (value is HashSet setVal) + return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } } diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index 83f58691..26c2c7c6 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -52,7 +52,7 @@ public class MysqlType public JsonElement? CJson { get; set; } public JsonElement? CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public byte? CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } @@ -66,7 +66,7 @@ public class ExtendedBio public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? 
BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public enum MysqlTypesCEnum { @@ -90,9 +90,9 @@ public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) return StringToEnum[me]; } - public static MysqlTypesCEnum[] ToMysqlTypesCEnumArr(this string me) + public static HashSet ToMysqlTypesCEnumSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -118,9 +118,9 @@ public static MysqlTypesCSet ToMysqlTypesCSet(this string me) return StringToEnum[me]; } - public static MysqlTypesCSet[] ToMysqlTypesCSetArr(this string me) + public static HashSet ToMysqlTypesCSetSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -146,9 +146,9 @@ public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) return StringToEnum[me]; } - public static ExtendedBiosBioType[] ToExtendedBiosBioTypeArr(this string me) + public static HashSet ToExtendedBiosBioTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } @@ -174,9 +174,9 @@ public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) return StringToEnum[me]; } - public static ExtendedBiosAuthorType[] ToExtendedBiosAuthorTypeArr(this string me) + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { - return me.Split(',').ToList().Select(v => StringToEnum[v]).ToArray(); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 0357dd17..89e6aa2c 100644 --- 
a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -771,7 +771,7 @@ public class InsertMysqlTypesArgs public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -917,7 +917,7 @@ public class InsertMysqlTypesBatchArgs public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -955,7 +955,7 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -1022,7 +1022,7 @@ public class GetMysqlTypesRow public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public byte? 
CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } @@ -1077,7 +1077,7 @@ public async Task GetMysqlTypes() CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), @@ -1140,7 +1140,7 @@ public async Task GetMysqlTypes() CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), @@ -1187,7 +1187,7 @@ public class GetMysqlTypesCntRow public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } - public MysqlTypesCSet[] CSet { get; set; } + public HashSet CSet { get; set; } public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } @@ -1242,7 +1242,7 @@ public async Task GetMysqlTypesCnt() CJson = reader.IsDBNull(25) ? (JsonElement? 
)null : JsonSerializer.Deserialize(reader.GetString(25)), CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), @@ -1305,7 +1305,7 @@ public async Task GetMysqlTypesCnt() CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetArr(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), @@ -1420,7 +1420,7 @@ public class CreateExtendedBioArgs public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -1465,7 +1465,7 @@ public class GetFirstExtendedBioByTypeRow public string AuthorName { get; set; } public string Name { get; set; } public ExtendedBiosBioType? 
BioType { get; set; } - public ExtendedBiosAuthorType[] AuthorType { get; set; } + public HashSet AuthorType { get; set; } }; public class GetFirstExtendedBioByTypeArgs { @@ -1490,7 +1490,7 @@ public async Task GetFirstExtendedBioByType(GetFir AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), BioType = reader.IsDBNull(2) ? (ExtendedBiosBioType? )null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeArr() + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() }; } } @@ -1519,7 +1519,7 @@ public async Task GetFirstExtendedBioByType(GetFir AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), BioType = reader.IsDBNull(2) ? (ExtendedBiosBioType? )null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeArr() + AuthorType = reader.IsDBNull(3) ? 
null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() }; } } diff --git a/examples/MySqlConnectorLegacyExample/Utils.cs b/examples/MySqlConnectorLegacyExample/Utils.cs index 2912c7ea..f43f4088 100644 --- a/examples/MySqlConnectorLegacyExample/Utils.cs +++ b/examples/MySqlConnectorLegacyExample/Utils.cs @@ -4,6 +4,7 @@ namespace MySqlConnectorLegacyExampleGen using CsvHelper; using CsvHelper.Configuration; using CsvHelper.TypeConversion; + using System.Collections.Generic; using System.Linq; public static class Utils @@ -20,8 +21,8 @@ public override string ConvertToString(object value, IWriterRow row, MemberMapDa { if (value == null) return @"\N"; - if (value is MysqlTypesCSet[] arrVal) - return string.Join(",", arrVal); + if (value is HashSet setVal) + return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } } From ad2cc8974e32516ab0cae57785539e6338b5fada Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 00:07:25 +0200 Subject: [PATCH 03/33] fix: add test for invalid xml for postgres xml data type --- end2end/EndToEndScaffold/Config.cs | 3 ++ .../Templates/PostgresTests.cs | 15 +++++++ .../NpgsqlDapperTester.generated.cs | 6 +++ .../EndToEndTests/NpgsqlTester.generated.cs | 6 +++ .../NpgsqlDapperTester.generated.cs | 6 +++ .../NpgsqlTester.generated.cs | 6 +++ examples/NpgsqlDapperExample/Models.cs | 1 + examples/NpgsqlDapperExample/QuerySql.cs | 7 ++- examples/NpgsqlDapperExample/request.json | 45 ++++++++++++++++--- examples/NpgsqlDapperExample/request.message | 44 ++++++++++-------- examples/NpgsqlDapperLegacyExample/Models.cs | 1 + .../NpgsqlDapperLegacyExample/QuerySql.cs | 7 ++- .../NpgsqlDapperLegacyExample/request.json | 45 ++++++++++++++++--- .../NpgsqlDapperLegacyExample/request.message | 44 ++++++++++-------- examples/NpgsqlExample/Models.cs | 2 +- examples/NpgsqlExample/QuerySql.cs | 28 +++++++----- examples/NpgsqlExample/request.json | 45 ++++++++++++++++--- examples/NpgsqlExample/request.message | 44 
++++++++++-------- examples/NpgsqlLegacyExample/Models.cs | 1 + examples/NpgsqlLegacyExample/QuerySql.cs | 26 ++++++----- examples/NpgsqlLegacyExample/request.json | 45 ++++++++++++++++--- examples/NpgsqlLegacyExample/request.message | 44 ++++++++++-------- examples/config/postgresql/query.sql | 3 ++ examples/config/postgresql/schema.sql | 11 ++--- sqlc.ci.yaml | 16 +++++++ sqlc.local.generated.yaml | 16 +++++++ sqlc.request.generated.yaml | 16 +++++++ 27 files changed, 397 insertions(+), 136 deletions(-) diff --git a/end2end/EndToEndScaffold/Config.cs b/end2end/EndToEndScaffold/Config.cs index 9fcca5fb..ecd8af38 100644 --- a/end2end/EndToEndScaffold/Config.cs +++ b/end2end/EndToEndScaffold/Config.cs @@ -58,6 +58,7 @@ public enum KnownTestType PostgresJsonDataTypes, PostgresInvalidJson, PostgresXmlDataTypes, + PostgresInvalidXml, ArrayAsParam, MultipleArraysAsParams, @@ -205,6 +206,7 @@ internal static class Config KnownTestType.PostgresInvalidJson, KnownTestType.PostgresNetworkDataTypes, KnownTestType.PostgresXmlDataTypes, + KnownTestType.PostgresInvalidXml, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, @@ -249,6 +251,7 @@ internal static class Config KnownTestType.PostgresInvalidJson, KnownTestType.PostgresNetworkDataTypes, KnownTestType.PostgresXmlDataTypes, + KnownTestType.PostgresInvalidXml, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index bb147ba7..dbbd398c 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -1006,6 +1006,21 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } """ }, + + [KnownTestType.PostgresInvalidXml] = new TestImpl + { + Impl = $$""" + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await + 
QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + { + CXmlStringOverride = "SOME INVALID XML" + })); + } + """ + }, [KnownTestType.PostgresNetworkCopyFrom] = new TestImpl { Impl = $$""" diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 26a58db6..90f28c1c 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -918,6 +918,12 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 9ce57c51..56491e73 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -918,6 +918,12 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 28730b12..fe79061a 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -918,6 +918,12 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new 
QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 02281d4c..46d02e8f 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -918,6 +918,12 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] public async Task TestArray() { diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 17307005..9745e304 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -48,6 +48,7 @@ public class PostgresType public JsonElement? CJsonb { get; init; } public string? CJsonpath { get; init; } public XmlDocument? CXml { get; init; } + public XmlDocument? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? 
CMacaddr { get; init; } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 9aa99e01..9bbde1cf 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -529,7 +529,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, 
@c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public class InsertPostgresTypesArgs { public bool? CBoolean { get; init; } @@ -558,6 +558,7 @@ public class InsertPostgresTypesArgs public JsonElement? CJsonb { get; init; } public string? CJsonpath { get; init; } public XmlDocument? CXml { get; init; } + public string? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? CMacaddr { get; init; } @@ -592,6 +593,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); queryParams.Add("c_jsonpath", args.CJsonpath); queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -683,7 +685,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types 
LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; init; } @@ -712,6 +714,7 @@ public class GetPostgresTypesRow public JsonElement? CJsonb { get; init; } public string? CJsonpath { get; init; } public XmlDocument? CXml { get; init; } + public string? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? CMacaddr { get; init; } diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index c7520d91..f98ea5db 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -11,7 +11,7 @@ "codegen": { "out": "examples/NpgsqlDapperExample", "plugin": "csharp", - "options": "eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyRXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6InN0cmluZyJ9fV0sInRhcmdldEZyYW1ld29yayI6Im5ldDguMCIsInVzZURhcHBlciI6dHJ1ZX0=", + "options": 
"eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyRXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7Im5vdE51bGwiOmZhbHNlLCJ0eXBlIjoic3RyaW5nIn19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX1dLCJ0YXJnZXRGcmFtZXdvcmsiOiJuZXQ4LjAiLCJ1c2VEYXBwZXIiOnRydWV9", "process": { "cmd": "./dist/LocalRunner" } @@ -384,6 +384,16 @@ "name": "xml" } }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + } + }, { "name": "c_cidr", "length": -1, @@ -33207,7 +33217,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27,\n $28,\n $29::macaddr,\n $30::macaddr8\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n 
c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33599,6 +33609,16 @@ }, { "number": 27, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33614,7 +33634,7 @@ } }, { - "number": 28, + "number": 29, "column": { "name": "c_inet", "length": -1, @@ -33630,7 +33650,7 @@ } }, { - "number": 29, + "number": 30, "column": { "name": "c_macaddr", "length": -1, @@ -33640,7 +33660,7 @@ } }, { - "number": 30, + "number": 31, "column": { "name": "c_macaddr8", "length": -1, @@ -34012,7 +34032,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n 
c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34318,6 +34338,17 @@ }, "originalName": "c_xml" }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + }, + "originalName": "c_xml_string_override" + }, { "name": "c_cidr", "length": -1, @@ -35302,5 +35333,5 @@ } ], "sqlc_version": "v1.27.0", - "plugin_options": "eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0OC4wIiwibmFtZXNwYWNlTmFtZSI6Ik5wZ3NxbERhcHBlckV4YW1wbGVHZW4iLCJ1c2VEYXBwZXIiOnRydWUsIm92ZXJyaWRlRGFwcGVyVmVyc2lvbiI6IiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiaW50Iiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6IkRhdGVUaW1lIiwibm90TnVsbCI6dHJ1ZX19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fV0sImRlYnVnUmVxdWVzdCI6ZmFsc2V9" + "plugin_options": 
"eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0OC4wIiwibmFtZXNwYWNlTmFtZSI6Ik5wZ3NxbERhcHBlckV4YW1wbGVHZW4iLCJ1c2VEYXBwZXIiOnRydWUsIm92ZXJyaWRlRGFwcGVyVmVyc2lvbiI6IiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiaW50Iiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6IkRhdGVUaW1lIiwibm90TnVsbCI6dHJ1ZX19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX1dLCJkZWJ1Z1JlcXVlc3QiOmZhbHNlfQ==" } \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 8b05217a..ad993575 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -1,9 +1,9 @@ -ц +╗ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbЗ -examples/NpgsqlDapperExamplecsharp╚{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunnerйц 
public"¤publicГ +postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlb▄ +examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* +./dist/LocalRunnerцц public"║publicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextф + description0         Rbooksbtextб postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -59,7 +59,8 @@ pg_catalogvarchar1 c_jsonpath0         Rpostgres_typesb jsonpath+ -c_xml0         Rpostgres_typesbxml- +c_xml0         Rpostgres_typesbxml; +c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -10298,8 +10299,8 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlИ -мINSERT INTO postgres_types +name0         Rbooksbtextzname: query.sqlр +╒INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10327,6 +10328,7 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_jsonb, c_jsonpath, c_xml, 
+ c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10359,10 +10361,11 @@ VALUES ( $24::jsonb, $25::jsonpath, $26::xml, - $27, + $27::xml, $28, - $29::macaddr, - $30::macaddr8 + $29, + $30::macaddr, + $31::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10396,10 +10399,11 @@ c_interval*PL c_jsonpath0         b jsonpath* -c_xml0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_xml0         bxml*-) +c_xml_string_override0         bxml*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b macaddr8: query.sqlBpostgres_typesЗ @@ -10481,8 +10485,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesн -ДSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ +ЯSELECT c_boolean, c_bit, c_smallint, @@ -10509,6 +10513,7 @@ c_interval*NJ c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10563,7 +10568,8 @@ pg_catalogvarcharzc_character_varying"; c_jsonpath0         Rpostgres_typesb jsonpathz c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"5 +c_xml0         Rpostgres_typesbxmlzc_xml"R +c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10747,4 +10753,4 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0* {"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*╘{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index b495a364..365caf6a 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ 
-49,6 +49,7 @@ public class PostgresType public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 782d4024..0e189d61 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -530,7 +530,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, 
@c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -559,6 +559,7 @@ public class InsertPostgresTypesArgs public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -593,6 +594,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); queryParams.Add("c_jsonpath", args.CJsonpath); queryParams.Add("c_xml", args.CXml != null ? 
args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -684,7 +686,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -713,6 +715,7 @@ public class GetPostgresTypesRow public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } public NpgsqlCidr? 
CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 3360c681..279df59c 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -11,7 +11,7 @@ "codegen": { "out": "examples/NpgsqlDapperLegacyExample", "plugin": "csharp", - "options": "eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyTGVnYWN5RXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6InN0cmluZyJ9fV0sInRhcmdldEZyYW1ld29yayI6Im5ldHN0YW5kYXJkMi4wIiwidXNlRGFwcGVyIjp0cnVlfQ==", + "options": 
"eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyTGVnYWN5RXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7Im5vdE51bGwiOmZhbHNlLCJ0eXBlIjoic3RyaW5nIn19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX1dLCJ0YXJnZXRGcmFtZXdvcmsiOiJuZXRzdGFuZGFyZDIuMCIsInVzZURhcHBlciI6dHJ1ZX0=", "process": { "cmd": "./dist/LocalRunner" } @@ -384,6 +384,16 @@ "name": "xml" } }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + } + }, { "name": "c_cidr", "length": -1, @@ -33207,7 +33217,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27,\n $28,\n $29::macaddr,\n $30::macaddr8\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n 
c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33599,6 +33609,16 @@ }, { "number": 27, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33614,7 +33634,7 @@ } }, { - "number": 28, + "number": 29, "column": { "name": "c_inet", "length": -1, @@ -33630,7 +33650,7 @@ } }, { - "number": 29, + "number": 30, "column": { "name": "c_macaddr", "length": -1, @@ -33640,7 +33660,7 @@ } }, { - "number": 30, + "number": 31, "column": { "name": "c_macaddr8", "length": -1, @@ -34012,7 +34032,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n 
c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34318,6 +34338,17 @@ }, "originalName": "c_xml" }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + }, + "originalName": "c_xml_string_override" + }, { "name": "c_cidr", "length": -1, @@ -35302,5 +35333,5 @@ } ], "sqlc_version": "v1.27.0", - "plugin_options": "eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0c3RhbmRhcmQyLjAiLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyTGVnYWN5RXhhbXBsZUdlbiIsInVzZURhcHBlciI6dHJ1ZSwib3ZlcnJpZGVEYXBwZXJWZXJzaW9uIjoiIiwib3ZlcnJpZGVzIjpbeyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfaW50ZWdlciIsImNzaGFycF90eXBlIjp7InR5cGUiOiJpbnQiLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X3ZhcmNoYXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF90aW1lc3RhbXAiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiRGF0ZVRpbWUiLCJub3ROdWxsIjp0cnVlfX0seyJjb2x1bW4iOiIqOmNfanNvbl9zdHJpbmdfb3ZlcnJpZGUiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6Iio6Y19tYWNhZGRyOCIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19XSwiZGVidWdSZXF1ZXN0IjpmYWxzZX0=" + "plugin_options": 
"eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0c3RhbmRhcmQyLjAiLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRGFwcGVyTGVnYWN5RXhhbXBsZUdlbiIsInVzZURhcHBlciI6dHJ1ZSwib3ZlcnJpZGVEYXBwZXJWZXJzaW9uIjoiIiwib3ZlcnJpZGVzIjpbeyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfaW50ZWdlciIsImNzaGFycF90eXBlIjp7InR5cGUiOiJpbnQiLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X3ZhcmNoYXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF90aW1lc3RhbXAiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiRGF0ZVRpbWUiLCJub3ROdWxsIjp0cnVlfX0seyJjb2x1bW4iOiIqOmNfanNvbl9zdHJpbmdfb3ZlcnJpZGUiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6Iio6Y194bWxfc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fV0sImRlYnVnUmVxdWVzdCI6ZmFsc2V9" } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 0bf7eb12..0adb6866 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -1,9 +1,9 @@ -· +╧ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbЫ 
-"examples/NpgsqlDapperLegacyExamplecsharp╓{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunnerйц public"¤publicГ +postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbЁ +"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* +./dist/LocalRunnerцц public"║publicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextф + description0         Rbooksbtextб postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -59,7 +59,8 @@ pg_catalogvarchar1 c_jsonpath0         Rpostgres_typesb 
jsonpath+ -c_xml0         Rpostgres_typesbxml- +c_xml0         Rpostgres_typesbxml; +c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -10298,8 +10299,8 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlИ -мINSERT INTO postgres_types +name0         Rbooksbtextzname: query.sqlр +╒INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10327,6 +10328,7 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10359,10 +10361,11 @@ VALUES ( $24::jsonb, $25::jsonpath, $26::xml, - $27, + $27::xml, $28, - $29::macaddr, - $30::macaddr8 + $29, + $30::macaddr, + $31::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10396,10 +10399,11 @@ c_interval*PL c_jsonpath0         b jsonpath* -c_xml0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_xml0         bxml*-) +c_xml_string_override0         bxml*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b macaddr8: query.sqlBpostgres_typesЗ @@ -10481,8 +10485,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesн -ДSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ +ЯSELECT c_boolean, c_bit, c_smallint, @@ -10509,6 +10513,7 @@ c_interval*NJ c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10563,7 +10568,8 @@ pg_catalogvarcharzc_character_varying"; c_jsonpath0         Rpostgres_typesb jsonpathz c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"5 +c_xml0         Rpostgres_typesbxmlzc_xml"R +c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10747,4 +10753,4 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*Н{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*т{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index ead0cb95..609dc1e9 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -10,6 +10,6 @@ 
namespace NpgsqlExampleGen; public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid); +public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid); public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? 
CTimestampArray); \ No newline at end of file diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index 8034e3f0..e7fea5c1 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -654,8 +654,8 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -690,6 +690,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -736,6 +737,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -788,8 +790,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? 
CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -835,10 +837,11 @@ public async Task InsertPostgresTypesBatch(List ar xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 25), - CCidr = reader.IsDBNull(26) ? null : reader.GetFieldValue(26), - CInet = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CMacaddr = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr8 = reader.IsDBNull(29) ? 
null : reader.GetString(29) + CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CCidr = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), + CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), + CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), + CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) }; } } @@ -894,10 +897,11 @@ public async Task InsertPostgresTypesBatch(List ar xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 25), - CCidr = reader.IsDBNull(26) ? null : reader.GetFieldValue(26), - CInet = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CMacaddr = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr8 = reader.IsDBNull(29) ? null : reader.GetString(29) + CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CCidr = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), + CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), + CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), + CMacaddr8 = reader.IsDBNull(30) ? 
null : reader.GetString(30) }; } } diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index e9e6d63f..a626ea49 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -11,7 +11,7 @@ "codegen": { "out": "examples/NpgsqlExample", "plugin": "csharp", - "options": "eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6InN0cmluZyJ9fV0sInRhcmdldEZyYW1ld29yayI6Im5ldDguMCIsInVzZURhcHBlciI6ZmFsc2V9", + "options": 
"eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsRXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7Im5vdE51bGwiOmZhbHNlLCJ0eXBlIjoic3RyaW5nIn19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX1dLCJ0YXJnZXRGcmFtZXdvcmsiOiJuZXQ4LjAiLCJ1c2VEYXBwZXIiOmZhbHNlfQ==", "process": { "cmd": "./dist/LocalRunner" } @@ -384,6 +384,16 @@ "name": "xml" } }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + } + }, { "name": "c_cidr", "length": -1, @@ -33207,7 +33217,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27,\n $28,\n $29::macaddr,\n $30::macaddr8\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n 
c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33599,6 +33609,16 @@ }, { "number": 27, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33614,7 +33634,7 @@ } }, { - "number": 28, + "number": 29, "column": { "name": "c_inet", "length": -1, @@ -33630,7 +33650,7 @@ } }, { - "number": 29, + "number": 30, "column": { "name": "c_macaddr", "length": -1, @@ -33640,7 +33660,7 @@ } }, { - "number": 30, + "number": 31, "column": { "name": "c_macaddr8", "length": -1, @@ -34012,7 +34032,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n 
c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34318,6 +34338,17 @@ }, "originalName": "c_xml" }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + }, + "originalName": "c_xml_string_override" + }, { "name": "c_cidr", "length": -1, @@ -35302,5 +35333,5 @@ } ], "sqlc_version": "v1.27.0", - "plugin_options": "eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0OC4wIiwibmFtZXNwYWNlTmFtZSI6Ik5wZ3NxbEV4YW1wbGVHZW4iLCJ1c2VEYXBwZXIiOmZhbHNlLCJvdmVycmlkZURhcHBlclZlcnNpb24iOiIiLCJvdmVycmlkZXMiOlt7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF9pbnRlZ2VyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6ImludCIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdmFyY2hhciIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X3RpbWVzdGFtcCIsImNzaGFycF90eXBlIjp7InR5cGUiOiJEYXRlVGltZSIsIm5vdE51bGwiOnRydWV9fSx7ImNvbHVtbiI6Iio6Y19qc29uX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX1dLCJkZWJ1Z1JlcXVlc3QiOmZhbHNlfQ==" + "plugin_options": 
"eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0OC4wIiwibmFtZXNwYWNlTmFtZSI6Ik5wZ3NxbEV4YW1wbGVHZW4iLCJ1c2VEYXBwZXIiOmZhbHNlLCJvdmVycmlkZURhcHBlclZlcnNpb24iOiIiLCJvdmVycmlkZXMiOlt7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF9pbnRlZ2VyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6ImludCIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdmFyY2hhciIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X3RpbWVzdGFtcCIsImNzaGFycF90eXBlIjp7InR5cGUiOiJEYXRlVGltZSIsIm5vdE51bGwiOnRydWV9fSx7ImNvbHVtbiI6Iio6Y19qc29uX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiKjpjX3htbF9zdHJpbmdfb3ZlcnJpZGUiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6Iio6Y19tYWNhZGRyOCIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19XSwiZGVidWdSZXF1ZXN0IjpmYWxzZX0=" } \ No newline at end of file diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 3d5a7604..98f46559 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -1,9 +1,9 @@ -█ +░ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlb№ -examples/NpgsqlExamplecsharp├{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunnerйц public"¤publicГ 
+postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlb╤ +examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* +./dist/LocalRunnerцц public"║publicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextф + description0         Rbooksbtextб postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -59,7 +59,8 @@ pg_catalogvarchar1 c_jsonpath0         Rpostgres_typesb jsonpath+ -c_xml0         Rpostgres_typesbxml- +c_xml0         Rpostgres_typesbxml; +c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -10298,8 +10299,8 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlИ -мINSERT INTO postgres_types +name0         Rbooksbtextzname: query.sqlр +╒INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10327,6 +10328,7 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, 
c_cidr, c_inet, c_macaddr, @@ -10359,10 +10361,11 @@ VALUES ( $24::jsonb, $25::jsonpath, $26::xml, - $27, + $27::xml, $28, - $29::macaddr, - $30::macaddr8 + $29, + $30::macaddr, + $31::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10396,10 +10399,11 @@ c_interval*PL c_jsonpath0         b jsonpath* -c_xml0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_xml0         bxml*-) +c_xml_string_override0         bxml*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b macaddr8: query.sqlBpostgres_typesЗ @@ -10481,8 +10485,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesн -ДSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ +ЯSELECT c_boolean, c_bit, c_smallint, @@ -10509,6 +10513,7 @@ c_interval*NJ c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10563,7 +10568,8 @@ pg_catalogvarcharzc_character_varying"; c_jsonpath0         Rpostgres_typesb jsonpathz c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"5 +c_xml0         Rpostgres_typesbxmlzc_xml"R +c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10747,4 +10753,4 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*·{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*╧{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 3778fa9f..1ddd96b2 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -49,6 +49,7 @@ public class 
PostgresType public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index c7df7495..a72f22d8 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -752,7 +752,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, 
@c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -781,6 +781,7 @@ public class InsertPostgresTypesArgs public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -820,6 +821,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -866,6 +868,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_xml", args.CXml != null ? 
args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -943,7 +946,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -972,6 +975,7 @@ public class GetPostgresTypesRow public JsonElement? CJsonb { get; set; } public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -1022,10 +1026,11 @@ public async Task GetPostgresTypes() xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 25), - CCidr = reader.IsDBNull(26) ? (NpgsqlCidr? 
)null : reader.GetFieldValue(26), - CInet = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CMacaddr = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr8 = reader.IsDBNull(29) ? null : reader.GetString(29) + CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CCidr = reader.IsDBNull(27) ? (NpgsqlCidr? )null : reader.GetFieldValue(27), + CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), + CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), + CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) }; } } @@ -1081,10 +1086,11 @@ public async Task GetPostgresTypes() xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 25), - CCidr = reader.IsDBNull(26) ? (NpgsqlCidr? )null : reader.GetFieldValue(26), - CInet = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CMacaddr = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr8 = reader.IsDBNull(29) ? null : reader.GetString(29) + CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CCidr = reader.IsDBNull(27) ? (NpgsqlCidr? )null : reader.GetFieldValue(27), + CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), + CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), + CMacaddr8 = reader.IsDBNull(30) ? 
null : reader.GetString(30) }; } } diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index 54141e9d..c4a512ca 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -11,7 +11,7 @@ "codegen": { "out": "examples/NpgsqlLegacyExample", "plugin": "csharp", - "options": "eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsTGVnYWN5RXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6InN0cmluZyJ9fV0sInRhcmdldEZyYW1ld29yayI6Im5ldHN0YW5kYXJkMi4wIiwidXNlRGFwcGVyIjpmYWxzZX0=", + "options": 
"eyJkZWJ1Z1JlcXVlc3QiOnRydWUsImdlbmVyYXRlQ3Nwcm9qIjp0cnVlLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsTGVnYWN5RXhhbXBsZUdlbiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJub3ROdWxsIjpmYWxzZSwidHlwZSI6ImludCJ9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6dHJ1ZSwidHlwZSI6IkRhdGVUaW1lIn19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7Im5vdE51bGwiOmZhbHNlLCJ0eXBlIjoic3RyaW5nIn19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsibm90TnVsbCI6ZmFsc2UsInR5cGUiOiJzdHJpbmcifX1dLCJ0YXJnZXRGcmFtZXdvcmsiOiJuZXRzdGFuZGFyZDIuMCIsInVzZURhcHBlciI6ZmFsc2V9", "process": { "cmd": "./dist/LocalRunner" } @@ -384,6 +384,16 @@ "name": "xml" } }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + } + }, { "name": "c_cidr", "length": -1, @@ -33207,7 +33217,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27,\n $28,\n $29::macaddr,\n $30::macaddr8\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n 
c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33599,6 +33609,16 @@ }, { "number": 27, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33614,7 +33634,7 @@ } }, { - "number": 28, + "number": 29, "column": { "name": "c_inet", "length": -1, @@ -33630,7 +33650,7 @@ } }, { - "number": 29, + "number": 30, "column": { "name": "c_macaddr", "length": -1, @@ -33640,7 +33660,7 @@ } }, { - "number": 30, + "number": 31, "column": { "name": "c_macaddr8", "length": -1, @@ -34012,7 +34032,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n 
c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34318,6 +34338,17 @@ }, "originalName": "c_xml" }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "xml" + }, + "originalName": "c_xml_string_override" + }, { "name": "c_cidr", "length": -1, @@ -35302,5 +35333,5 @@ } ], "sqlc_version": "v1.27.0", - "plugin_options": "eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0c3RhbmRhcmQyLjAiLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsTGVnYWN5RXhhbXBsZUdlbiIsInVzZURhcHBlciI6ZmFsc2UsIm92ZXJyaWRlRGFwcGVyVmVyc2lvbiI6IiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiaW50Iiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6IkRhdGVUaW1lIiwibm90TnVsbCI6dHJ1ZX19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiIqOmNfbWFjYWRkcjgiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoic3RyaW5nIiwibm90TnVsbCI6ZmFsc2V9fV0sImRlYnVnUmVxdWVzdCI6ZmFsc2V9" + "plugin_options": 
"eyJvdmVycmlkZURyaXZlclZlcnNpb24iOiIiLCJnZW5lcmF0ZUNzcHJvaiI6dHJ1ZSwidGFyZ2V0RnJhbWV3b3JrIjoibmV0c3RhbmRhcmQyLjAiLCJuYW1lc3BhY2VOYW1lIjoiTnBnc3FsTGVnYWN5RXhhbXBsZUdlbiIsInVzZURhcHBlciI6ZmFsc2UsIm92ZXJyaWRlRGFwcGVyVmVyc2lvbiI6IiIsIm92ZXJyaWRlcyI6W3siY29sdW1uIjoiR2V0UG9zdGdyZXNGdW5jdGlvbnM6bWF4X2ludGVnZXIiLCJjc2hhcnBfdHlwZSI6eyJ0eXBlIjoiaW50Iiwibm90TnVsbCI6ZmFsc2V9fSx7ImNvbHVtbiI6IkdldFBvc3RncmVzRnVuY3Rpb25zOm1heF92YXJjaGFyIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiJHZXRQb3N0Z3Jlc0Z1bmN0aW9uczptYXhfdGltZXN0YW1wIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6IkRhdGVUaW1lIiwibm90TnVsbCI6dHJ1ZX19LHsiY29sdW1uIjoiKjpjX2pzb25fc3RyaW5nX292ZXJyaWRlIiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX0seyJjb2x1bW4iOiIqOmNfeG1sX3N0cmluZ19vdmVycmlkZSIsImNzaGFycF90eXBlIjp7InR5cGUiOiJzdHJpbmciLCJub3ROdWxsIjpmYWxzZX19LHsiY29sdW1uIjoiKjpjX21hY2FkZHI4IiwiY3NoYXJwX3R5cGUiOnsidHlwZSI6InN0cmluZyIsIm5vdE51bGwiOmZhbHNlfX1dLCJkZWJ1Z1JlcXVlc3QiOmZhbHNlfQ==" } \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index f5579efe..32727fcf 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -1,9 +1,9 @@ -я +─ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbР -examples/NpgsqlLegacyExamplecsharp╤{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* 
-./dist/LocalRunnerйц public"¤publicГ +postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbх +examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* +./dist/LocalRunnerцц public"║publicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextф + description0         Rbooksbtextб postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -59,7 +59,8 @@ pg_catalogvarchar1 c_jsonpath0         Rpostgres_typesb jsonpath+ -c_xml0         Rpostgres_typesbxml- +c_xml0         Rpostgres_typesbxml; +c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -10298,8 +10299,8 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlИ -мINSERT INTO postgres_types +name0         Rbooksbtextzname: query.sqlр +╒INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10327,6 +10328,7 @@ WHERE books.name = 
$1GetAuthorsByBookName:many"- c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10359,10 +10361,11 @@ VALUES ( $24::jsonb, $25::jsonpath, $26::xml, - $27, + $27::xml, $28, - $29::macaddr, - $30::macaddr8 + $29, + $30::macaddr, + $31::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10396,10 +10399,11 @@ c_interval*PL c_jsonpath0         b jsonpath* -c_xml0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_xml0         bxml*-) +c_xml_string_override0         bxml*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b macaddr8: query.sqlBpostgres_typesЗ @@ -10481,8 +10485,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesн -ДSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ +ЯSELECT c_boolean, c_bit, c_smallint, @@ -10509,6 +10513,7 @@ c_interval*NJ c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10563,7 +10568,8 @@ pg_catalogvarcharzc_character_varying"; c_jsonpath0         Rpostgres_typesb jsonpathz c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"5 +c_xml0         Rpostgres_typesbxmlzc_xml"R +c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10747,4 +10753,4 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*И{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql"v1.27.0*▌{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/config/postgresql/query.sql b/examples/config/postgresql/query.sql index 085d6390..967a1252 100644 --- a/examples/config/postgresql/query.sql +++ b/examples/config/postgresql/query.sql @@ -99,6 +99,7 
@@ INSERT INTO postgres_types c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -131,6 +132,7 @@ VALUES ( sqlc.narg('c_jsonb')::jsonb, sqlc.narg('c_jsonpath')::jsonpath, sqlc.narg('c_xml')::xml, + sqlc.narg('c_xml_string_override')::xml, sqlc.narg('c_cidr'), sqlc.narg('c_inet'), sqlc.narg('c_macaddr')::macaddr, @@ -218,6 +220,7 @@ SELECT c_jsonb, c_jsonpath, c_xml, + c_xml_string_override, c_cidr, c_inet, c_macaddr, diff --git a/examples/config/postgresql/schema.sql b/examples/config/postgresql/schema.sql index 62b03f38..178185d4 100644 --- a/examples/config/postgresql/schema.sql +++ b/examples/config/postgresql/schema.sql @@ -41,12 +41,13 @@ CREATE TABLE postgres_types ( c_bpchar BPCHAR(100), c_text TEXT, - /* JSON Data Types */ - c_json JSON, + /* Unstructured Data Types */ + c_json JSON, c_json_string_override JSON, - c_jsonb JSONB, - c_jsonpath JSONPATH, - c_xml XML, + c_jsonb JSONB, + c_jsonpath JSONPATH, + c_xml XML, + c_xml_string_override XML, /* Network Address Data Types */ c_cidr CIDR, diff --git a/sqlc.ci.yaml b/sqlc.ci.yaml index 79445e6e..7890db42 100644 --- a/sqlc.ci.yaml +++ b/sqlc.ci.yaml @@ -34,6 +34,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -66,6 +70,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -98,6 +106,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -130,6 +142,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" diff --git 
a/sqlc.local.generated.yaml b/sqlc.local.generated.yaml index 2f8f897f..5be5e915 100644 --- a/sqlc.local.generated.yaml +++ b/sqlc.local.generated.yaml @@ -33,6 +33,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -65,6 +69,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -97,6 +105,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -129,6 +141,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" diff --git a/sqlc.request.generated.yaml b/sqlc.request.generated.yaml index bbf6ba5a..a53a00c8 100644 --- a/sqlc.request.generated.yaml +++ b/sqlc.request.generated.yaml @@ -33,6 +33,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -66,6 +70,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -99,6 +107,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" @@ -132,6 +144,10 @@ sql: csharp_type: type: "string" notNull: false + - column: "*:c_xml_string_override" + csharp_type: + type: "string" + notNull: false - column: "*:c_macaddr8" csharp_type: type: "string" From 
5531b1ab3458aea94ceb44e82de5b981ee25e17a Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 11:02:22 +0200 Subject: [PATCH 04/33] fix: split postgres authors and data types tables to different files --- .../Templates/PostgresTests.cs | 22 +- end2end/EndToEndTests/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 22 +- end2end/EndToEndTests/NpgsqlTester.cs | 1 + .../EndToEndTests/NpgsqlTester.generated.cs | 22 +- .../EndToEndTestsLegacy/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 22 +- end2end/EndToEndTestsLegacy/NpgsqlTester.cs | 1 + .../NpgsqlTester.generated.cs | 22 +- examples/NpgsqlDapperExample/Models.cs | 31 +- examples/NpgsqlDapperExample/QuerySql.cs | 275 +++-- examples/NpgsqlDapperExample/request.json | 942 +++++++++--------- examples/NpgsqlDapperExample/request.message | 236 +++-- examples/NpgsqlDapperLegacyExample/Models.cs | 31 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 275 +++-- .../NpgsqlDapperLegacyExample/request.json | 942 +++++++++--------- .../NpgsqlDapperLegacyExample/request.message | 236 +++-- examples/NpgsqlExample/Models.cs | 7 +- examples/NpgsqlExample/QuerySql.cs | 383 ++++--- examples/NpgsqlExample/request.json | 942 +++++++++--------- examples/NpgsqlExample/request.message | 236 +++-- examples/NpgsqlLegacyExample/Models.cs | 31 +- examples/NpgsqlLegacyExample/QuerySql.cs | 453 +++++---- examples/NpgsqlLegacyExample/request.json | 942 +++++++++--------- examples/NpgsqlLegacyExample/request.message | 236 +++-- examples/config/postgresql/Dockerfile | 6 +- examples/config/postgresql/authors/query.sql | 71 ++ examples/config/postgresql/authors/schema.sql | 13 + .../config/postgresql/{ => types}/query.sql | 174 ++-- .../config/postgresql/{ => types}/schema.sql | 47 +- scripts/sync_sqlc_options.sh | 8 +- sqlc.ci.yaml | 16 +- sqlc.local.generated.yaml | 16 +- sqlc.request.generated.yaml | 16 +- 34 files changed, 3687 insertions(+), 2992 deletions(-) create mode 100644 
examples/config/postgresql/authors/query.sql create mode 100644 examples/config/postgresql/authors/schema.sql rename examples/config/postgresql/{ => types}/query.sql (71%) rename examples/config/postgresql/{ => types}/schema.sql (79%) diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index dbbd398c..9596247f 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -476,7 +476,7 @@ public async Task TestPostgresJsonDataTypes( if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, @@ -484,7 +484,7 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, @@ -492,10 +492,10 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -516,13 +516,13 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy public void TestPostgresInvalidJson() { Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + 
QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); @@ -984,20 +984,20 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -1014,7 +1014,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy public void TestPostgresInvalidXml() { Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.cs b/end2end/EndToEndTests/NpgsqlDapperTester.cs index 425855b6..7dfc2e07 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.cs @@ -17,5 +17,6 @@ public async Task EmptyTestsTable() await 
QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); + await QuerySql.TruncatePostgresUnstructuredTypes(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 90f28c1c..e18afe45 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -861,17 +861,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -887,8 +887,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonStringOverride = "SOME 
INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] @@ -903,14 +903,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -921,7 +921,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] diff --git a/end2end/EndToEndTests/NpgsqlTester.cs b/end2end/EndToEndTests/NpgsqlTester.cs 
index 39093f3e..6105a2dc 100644 --- a/end2end/EndToEndTests/NpgsqlTester.cs +++ b/end2end/EndToEndTests/NpgsqlTester.cs @@ -17,5 +17,6 @@ public async Task EmptyTestsTables() await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); + await QuerySql.TruncatePostgresUnstructuredTypes(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 56491e73..c18ef139 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -861,17 +861,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -887,8 +887,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void 
TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] @@ -903,14 +903,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -921,7 +921,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new 
QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs index faf30835..0dab21a9 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs @@ -17,6 +17,7 @@ public async Task EmptyTestsTable() await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); + await QuerySql.TruncatePostgresUnstructuredTypes(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index fe79061a..a179526e 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -861,17 +861,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -887,8 +887,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] @@ -903,14 +903,14 @@ public async Task 
TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -921,7 +921,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs index 202843f4..cc33e3ec 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs @@ -17,6 +17,7 @@ public async Task EmptyTestsTable() await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); + await QuerySql.TruncatePostgresUnstructuredTypes(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 46d02e8f..7377d05c 100644 
--- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -861,17 +861,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -887,8 +887,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + 
Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] @@ -903,14 +903,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresUnstructuredTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -921,7 +921,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 9745e304..b85a79c8 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -43,27 +43,20 @@ public class PostgresType public string? CCharacterVarying { get; init; } public string? CBpchar { get; init; } public string? CText { get; init; } - public JsonElement? 
CJson { get; init; } - public JsonElement? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public XmlDocument? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? CMacaddr { get; init; } public string? CMacaddr8 { get; init; } public Guid? CUuid { get; init; } }; -public class PostgresGeometricType +public class PostgresUnstructuredType { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public JsonElement? CJson { get; init; } + public JsonElement? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public XmlDocument? CXmlStringOverride { get; init; } }; public class PostgresArrayType { @@ -74,4 +67,14 @@ public class PostgresArrayType public decimal[]? CDecimalArray { get; init; } public DateTime[]? CDateArray { get; init; } public DateTime[]? CTimestampArray { get; init; } +}; +public class PostgresGeometricType +{ + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? 
CCircle { get; init; } }; \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 9bbde1cf..62f2045f 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -529,7 +529,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public class InsertPostgresTypesArgs 
{ public bool? CBoolean { get; init; } @@ -553,12 +553,6 @@ public class InsertPostgresTypesArgs public string? CBpchar { get; init; } public string? CText { get; init; } public Guid? CUuid { get; init; } - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public string? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? CMacaddr { get; init; } @@ -588,12 +582,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_bpchar", args.CBpchar); queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? 
args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -685,7 +673,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; init; } @@ -709,12 +697,6 @@ public class GetPostgresTypesRow public string? CBpchar { get; init; } public string? CText { get; init; } public Guid? CUuid { get; init; } - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public string? CXmlStringOverride { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? 
CMacaddr { get; init; } @@ -812,32 +794,14 @@ public class GetPostgresFunctionsRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesSql = "INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class InsertPostgresGeoTypesArgs - { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } - }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresTypesSql); } return; @@ -848,66 +812,36 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); - } - - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, 
c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs - { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } - }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? 
CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? CXmlStringOverride { get; init; } }; - public async Task GetPostgresGeoTypes() + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); - return result; + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -915,20 +849,28 @@ public class GetPostgresGeoTypesRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task 
TruncatePostgresTypes() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow + { + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? CXmlStringOverride { get; init; } + }; + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -936,17 +878,17 @@ public async Task TruncatePostgresTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); } return; @@ -957,10 +899,10 @@ public async Task 
TruncatePostgresGeoTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = "INSERT INTO postgres_array_types(c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; public class InsertPostgresArrayTypesArgs { public byte[]? CBytea { get; init; } @@ -1099,4 +1041,131 @@ public async Task TruncatePostgresArrayTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } + + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs + { + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? 
CCircle { get; init; } + }; + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + queryParams.Add("c_path", args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs + { + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? 
CCircle { get; init; } + }; + public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? 
CCircle { get; init; } + }; + public async Task GetPostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + } } \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index f98ea5db..9b7fdd3d 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": 
"examples/NpgsqlDapperExample", @@ -334,66 +336,6 @@ "name": "text" } }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, { "name": "c_cidr", "length": -1, @@ -448,77 +390,67 @@ }, { "rel": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - } - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" } }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" } }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" } }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" } }, { - "name": "c_polygon", + "name": "c_xml", 
"length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" } }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" } } ] @@ -615,6 +547,83 @@ "arrayDims": 1 } ] + }, + { + "rel": { + "name": "postgres_geometric_types" + }, + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + } + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + } + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + } + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + } + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + } + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + } + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + } + } + ] } ] }, @@ -33217,7 +33226,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n 
$6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33559,66 +33568,6 @@ }, { "number": 22, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 23, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 24, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 25, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 26, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 27, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33634,7 +33583,7 @@ } }, { - "number": 29, + "number": 23, "column": { "name": "c_inet", "length": -1, @@ -33650,7 +33599,7 @@ } }, { - "number": 30, + "number": 24, "column": { "name": "c_macaddr", "length": -1, @@ -33660,7 +33609,7 @@ } }, { - "number": 31, + "number": 25, "column": { "name": "c_macaddr8", "length": -1, 
@@ -33670,6 +33619,9 @@ } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_types" @@ -34032,7 +33984,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34283,72 +34235,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - }, - "originalName": "c_jsonb" - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - }, - "originalName": "c_jsonpath" - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, - { - "name": 
"c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" - }, { "name": "c_cidr", "length": -1, @@ -34714,336 +34600,167 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_point", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - } - }, - { - "number": 2, - "column": { - "name": "c_line", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - } - }, - { - "number": 3, - "column": { - "name": "c_lseg", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - } - }, - { - "number": 4, - "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", - "length": -1, - "table": { - "schema": "public", - "name": 
"postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_geometric_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "point" - }, - "originalName": "c_point" + "name": "json" + } } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "line" - }, - "originalName": "c_line" + "name": "json" + } } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_jsonb", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "lseg" - }, - "originalName": "c_lseg" + "name": "jsonb" + } } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_jsonpath", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "box" - }, - "originalName": "c_box" + "name": "jsonpath" + } } }, { "number": 5, "column": { - "name": "c_path", + "name": "c_xml", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "path" - }, - "originalName": "c_path" + "name": 
"xml" + } } }, { "number": 6, "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" }, - "originalName": "c_line" + "originalName": "c_json" }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" }, - "originalName": "c_lseg" + "originalName": "c_json_string_override" }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" }, - "originalName": "c_box" + "originalName": 
"c_jsonb" }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" }, - "originalName": "c_path" + "originalName": "c_jsonpath" }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" }, - "originalName": "c_polygon" + "originalName": "c_xml" }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" }, - "originalName": "c_circle" + "originalName": "c_xml_string_override" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", "cmd": ":exec", "parameters": [ @@ -35165,6 +34882,9 @@ } } ], + "comments": [ + " Array types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_array_types" @@ -35330,6 +35050,332 @@ "name": "TruncatePostgresArrayTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO 
postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n 
c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_point", + 
"length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index ad993575..265c9e1c 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -1,9 +1,9 @@ -╗ +д 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlb▄ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb▄ 
examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunnerцц public"║publicГ +./dist/LocalRunner╓ч public"кpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextб + description0         Rbooksbtextс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -53,29 +53,22 @@ pg_catalogvarcharI pg_catalogvarchar1 c_bpchar0         Rpostgres_typesbbpchar- c_text0         Rpostgres_typesbtext- -c_json0         Rpostgres_typesbjson= -c_json_string_override0         Rpostgres_typesbjson/ -c_jsonb0         Rpostgres_typesbjsonb5 - -c_jsonpath0         Rpostgres_typesb -jsonpath+ -c_xml0         Rpostgres_typesbxml; -c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuid╡ -postgres_geometric_types9 -c_point0         Rpostgres_geometric_typesbpoint7 -c_line0         Rpostgres_geometric_typesbline7 -c_lseg0         Rpostgres_geometric_typesblseg5 -c_box0         
Rpostgres_geometric_typesbbox7 -c_path0         Rpostgres_geometric_typesbpath= - c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleХ +c_uuid0         Rpostgres_typesbuuidн +postgres_unstructured_types: +c_json0         Rpostgres_unstructured_typesbjsonJ +c_json_string_override0         Rpostgres_unstructured_typesbjson< +c_jsonb0         Rpostgres_unstructured_typesbjsonbB + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpath8 +c_xml0         Rpostgres_unstructured_typesbxmlH +c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -87,7 +80,15 @@ pg_catalogint4 pg_catalognumericИ> c_date_array 0         Rpostgres_array_typesbdateИT c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampИ" pg_temp"ц▓ +pg_catalog timestampИ╡ +postgres_geometric_types9 +c_point0         Rpostgres_geometric_typesbpoint7 +c_line0         Rpostgres_geometric_typesbline7 +c_lseg0         Rpostgres_geometric_typesblseg5 +c_box0         Rpostgres_geometric_typesbbox7 +c_path0         Rpostgres_geometric_typesbpath= + c_polygon0         Rpostgres_geometric_typesb polygon; +c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ pg_catalogЙ & @@ -10299,8 +10300,9 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlр -╒INSERT INTO postgres_types +name0         Rbooksbtextzname: query.sql║ +М +INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10323,12 +10325,6 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10356,16 +10352,10 @@ VALUES ( $19, $20, $21, - $22::json, - $23::json, - 
$24::jsonb, - $25::jsonpath, - $26::xml, - $27::xml, - $28, - $29, - $30::macaddr, - $31::macaddr8 + $22, + $23, + $24::macaddr, + $25::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10392,21 +10382,13 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid* -c_json0         bjson*/+ -c_json_string_override0         bjson*! -c_jsonb0         bjsonb*'# - -c_jsonpath0         b -jsonpath* -c_xml0         bxml*-) -c_xml_string_override0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr8: query.sqlBpostgres_typesЗ +macaddr82 Basic types : query.sqlBpostgres_typesЗ ▄INSERT INTO postgres_types ( c_boolean, @@ -10485,8 +10467,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ -ЯSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ +┤SELECT c_boolean, c_bit, c_smallint, @@ -10508,12 +10490,6 @@ c_interval*NJ c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10561,15 +10537,6 @@ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_json0         Rpostgres_typesbjsonzc_json"U -c_json_string_override0         Rpostgres_typesbjsonzc_json_string_override"8 -c_jsonb0         Rpostgres_typesbjsonbzc_jsonb"A - -c_jsonpath0         Rpostgres_typesb -jsonpathz -c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"R -c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10679,40 +10646,55 @@ FROM postgres_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sql  -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle -) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesИ -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +∙ +INSERT INTO postgres_unstructured_types +( + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override ) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц -hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B -c_point0         Rpostgres_geometric_typesbpointzc_point"? 
-c_line0         Rpostgres_geometric_typesblinezc_line"? -c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< -c_box0         Rpostgres_geometric_typesbboxzc_box"? -c_path0         Rpostgres_geometric_typesbpathzc_path"H - c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E -c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlU -'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql═ -╬INSERT INTO postgres_array_types +VALUES ( + $1::json, + $2::json, + $3::jsonb, + $4::jsonpath, + $5::xml, + $6::xml +)InsertPostgresUnstructuredTypes:exec* +c_json0         bjson*/+ +c_json_string_override0         bjson*! +c_jsonb0         bjsonb*'# + +c_jsonpath0         b +jsonpath* +c_xml0         bxml*-) +c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн +ЪSELECT + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override +FROM postgres_unstructured_types +LIMIT 1GetPostgresUnstructuredTypes:one"B +c_json0         Rpostgres_unstructured_typesbjsonzc_json"b +c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E +c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpathz +c_jsonpath"? 
+c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ +c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla +*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +╧ +INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, @@ -10729,7 +10711,7 @@ VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresArrayTypes:exec*JF c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*XT c_date_array 0         Rpublicpostgres_array_typesbdatez c_date_arrayИ*rn -c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesе +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ2 Array types : query.sqlBpostgres_array_typesе ТSELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1GetPostgresArrayTypes:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ c_boolean_array 0         Rpostgres_array_typesb @@ -10753,4 +10735,48 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: 
query.sql"v1.27.0*╘{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ +л +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle2 Geometric types : query.sqlBpostgres_geometric_typesж +кINSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         
R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц +hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B +c_point0         Rpostgres_geometric_typesbpointzc_point"? +c_line0         Rpostgres_geometric_typesblinezc_line"? +c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< +c_box0         Rpostgres_geometric_typesbboxzc_box"? +c_path0         Rpostgres_geometric_typesbpathzc_path"H + c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E +c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlU +'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql"v1.27.0*╘{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index 365caf6a..cb869b53 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -44,27 +44,20 @@ public class PostgresType public string 
CCharacterVarying { get; set; } public string CBpchar { get; set; } public string CText { get; set; } - public JsonElement? CJson { get; set; } - public JsonElement? CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public XmlDocument CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } }; - public class PostgresGeometricType + public class PostgresUnstructuredType { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public JsonElement? CJson { get; set; } + public JsonElement? CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } }; public class PostgresArrayType { @@ -76,4 +69,14 @@ public class PostgresArrayType public DateTime[] CDateArray { get; set; } public DateTime[] CTimestampArray { get; set; } }; + public class PostgresGeometricType + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 0e189d61..9fb511e2 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -530,7 +530,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public 
class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -554,12 +554,6 @@ public class InsertPostgresTypesArgs public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -589,12 +583,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_bpchar", args.CBpchar); queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? 
args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -686,7 +674,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -710,12 +698,6 @@ public class GetPostgresTypesRow public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } public NpgsqlCidr? 
CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -813,32 +795,14 @@ public async Task GetPostgresFunctions() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesSql = "INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class InsertPostgresGeoTypesArgs - { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } - }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresTypesSql); } return; @@ -849,66 +813,36 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); - } - - private const 
string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs - { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } - }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? 
CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); - return result; + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -916,20 +850,28 @@ public async Task GetPostgresGeoTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - 
public async Task TruncatePostgresTypes() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow + { + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } + }; + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -937,17 +879,17 @@ public async Task TruncatePostgresTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); } return; @@ -958,10 +900,10 @@ public async 
Task TruncatePostgresGeoTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = "INSERT INTO postgres_array_types(c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; public class InsertPostgresArrayTypesArgs { public byte[] CBytea { get; set; } @@ -1100,5 +1042,132 @@ public async Task TruncatePostgresArrayTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } + + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + queryParams.Add("c_path", args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; + public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; + public async Task GetPostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + } } } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 279df59c..72f60daa 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": 
"examples/NpgsqlDapperLegacyExample", @@ -334,66 +336,6 @@ "name": "text" } }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, { "name": "c_cidr", "length": -1, @@ -448,77 +390,67 @@ }, { "rel": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - } - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" } }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" } }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" } }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" } }, { - "name": "c_polygon", + "name": 
"c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" } }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" } } ] @@ -615,6 +547,83 @@ "arrayDims": 1 } ] + }, + { + "rel": { + "name": "postgres_geometric_types" + }, + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + } + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + } + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + } + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + } + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + } + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + } + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + } + } + ] } ] }, @@ -33217,7 +33226,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n 
$4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33559,66 +33568,6 @@ }, { "number": 22, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 23, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 24, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 25, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 26, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 27, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33634,7 +33583,7 @@ } }, { - "number": 29, + "number": 23, "column": { "name": "c_inet", "length": -1, @@ -33650,7 +33599,7 @@ } }, { - "number": 30, + "number": 24, "column": { "name": "c_macaddr", "length": -1, @@ -33660,7 +33609,7 @@ } }, { - "number": 31, + "number": 25, "column": { "name": "c_macaddr8", 
"length": -1, @@ -33670,6 +33619,9 @@ } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_types" @@ -34032,7 +33984,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34283,72 +34235,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - }, - "originalName": "c_jsonb" - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - }, - "originalName": "c_jsonpath" - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, - { - 
"name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" - }, { "name": "c_cidr", "length": -1, @@ -34714,336 +34600,167 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_point", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - } - }, - { - "number": 2, - "column": { - "name": "c_line", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - } - }, - { - "number": 3, - "column": { - "name": "c_lseg", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - } - }, - { - "number": 4, - "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", - "length": -1, - "table": { - "schema": "public", - "name": 
"postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_geometric_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "point" - }, - "originalName": "c_point" + "name": "json" + } } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "line" - }, - "originalName": "c_line" + "name": "json" + } } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_jsonb", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "lseg" - }, - "originalName": "c_lseg" + "name": "jsonb" + } } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_jsonpath", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "box" - }, - "originalName": "c_box" + "name": "jsonpath" + } } }, { "number": 5, "column": { - "name": "c_path", + "name": "c_xml", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "path" - }, - "originalName": "c_path" + "name": 
"xml" + } } }, { "number": 6, "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" }, - "originalName": "c_line" + "originalName": "c_json" }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" }, - "originalName": "c_lseg" + "originalName": "c_json_string_override" }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" }, - "originalName": "c_box" + "originalName": 
"c_jsonb" }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" }, - "originalName": "c_path" + "originalName": "c_jsonpath" }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" }, - "originalName": "c_polygon" + "originalName": "c_xml" }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" }, - "originalName": "c_circle" + "originalName": "c_xml_string_override" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", "cmd": ":exec", "parameters": [ @@ -35165,6 +34882,9 @@ } } ], + "comments": [ + " Array types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_array_types" @@ -35330,6 +35050,332 @@ "name": "TruncatePostgresArrayTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO 
postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n 
c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_point", + 
"length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 0adb6866..1924081a 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -1,9 +1,9 @@ -╧ +╕ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbЁ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbЁ 
"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunnerцц public"║publicГ +./dist/LocalRunner╓ч public"кpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextб + description0         Rbooksbtextс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -53,29 +53,22 @@ pg_catalogvarcharI pg_catalogvarchar1 c_bpchar0         Rpostgres_typesbbpchar- c_text0         Rpostgres_typesbtext- -c_json0         Rpostgres_typesbjson= -c_json_string_override0         Rpostgres_typesbjson/ -c_jsonb0         Rpostgres_typesbjsonb5 - -c_jsonpath0         Rpostgres_typesb -jsonpath+ -c_xml0         Rpostgres_typesbxml; -c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuid╡ -postgres_geometric_types9 -c_point0         Rpostgres_geometric_typesbpoint7 -c_line0         Rpostgres_geometric_typesbline7 -c_lseg0         Rpostgres_geometric_typesblseg5 -c_box0         
Rpostgres_geometric_typesbbox7 -c_path0         Rpostgres_geometric_typesbpath= - c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleХ +c_uuid0         Rpostgres_typesbuuidн +postgres_unstructured_types: +c_json0         Rpostgres_unstructured_typesbjsonJ +c_json_string_override0         Rpostgres_unstructured_typesbjson< +c_jsonb0         Rpostgres_unstructured_typesbjsonbB + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpath8 +c_xml0         Rpostgres_unstructured_typesbxmlH +c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -87,7 +80,15 @@ pg_catalogint4 pg_catalognumericИ> c_date_array 0         Rpostgres_array_typesbdateИT c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampИ" pg_temp"ц▓ +pg_catalog timestampИ╡ +postgres_geometric_types9 +c_point0         Rpostgres_geometric_typesbpoint7 +c_line0         Rpostgres_geometric_typesbline7 +c_lseg0         Rpostgres_geometric_typesblseg5 +c_box0         Rpostgres_geometric_typesbbox7 +c_path0         Rpostgres_geometric_typesbpath= + c_polygon0         Rpostgres_geometric_typesb polygon; +c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ pg_catalogЙ & @@ -10299,8 +10300,9 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlр -╒INSERT INTO postgres_types +name0         Rbooksbtextzname: query.sql║ +М +INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10323,12 +10325,6 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10356,16 +10352,10 @@ VALUES ( $19, $20, $21, - $22::json, - $23::json, - 
$24::jsonb, - $25::jsonpath, - $26::xml, - $27::xml, - $28, - $29, - $30::macaddr, - $31::macaddr8 + $22, + $23, + $24::macaddr, + $25::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10392,21 +10382,13 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid* -c_json0         bjson*/+ -c_json_string_override0         bjson*! -c_jsonb0         bjsonb*'# - -c_jsonpath0         b -jsonpath* -c_xml0         bxml*-) -c_xml_string_override0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr8: query.sqlBpostgres_typesЗ +macaddr82 Basic types : query.sqlBpostgres_typesЗ ▄INSERT INTO postgres_types ( c_boolean, @@ -10485,8 +10467,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ -ЯSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ +┤SELECT c_boolean, c_bit, c_smallint, @@ -10508,12 +10490,6 @@ c_interval*NJ c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10561,15 +10537,6 @@ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_json0         Rpostgres_typesbjsonzc_json"U -c_json_string_override0         Rpostgres_typesbjsonzc_json_string_override"8 -c_jsonb0         Rpostgres_typesbjsonbzc_jsonb"A - -c_jsonpath0         Rpostgres_typesb -jsonpathz -c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"R -c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10679,40 +10646,55 @@ FROM postgres_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sql  -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle -) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesИ -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +∙ +INSERT INTO postgres_unstructured_types +( + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override ) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц -hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B -c_point0         Rpostgres_geometric_typesbpointzc_point"? 
-c_line0         Rpostgres_geometric_typesblinezc_line"? -c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< -c_box0         Rpostgres_geometric_typesbboxzc_box"? -c_path0         Rpostgres_geometric_typesbpathzc_path"H - c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E -c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlU -'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql═ -╬INSERT INTO postgres_array_types +VALUES ( + $1::json, + $2::json, + $3::jsonb, + $4::jsonpath, + $5::xml, + $6::xml +)InsertPostgresUnstructuredTypes:exec* +c_json0         bjson*/+ +c_json_string_override0         bjson*! +c_jsonb0         bjsonb*'# + +c_jsonpath0         b +jsonpath* +c_xml0         bxml*-) +c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн +ЪSELECT + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override +FROM postgres_unstructured_types +LIMIT 1GetPostgresUnstructuredTypes:one"B +c_json0         Rpostgres_unstructured_typesbjsonzc_json"b +c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E +c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpathz +c_jsonpath"? 
+c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ +c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla +*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +╧ +INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, @@ -10729,7 +10711,7 @@ VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresArrayTypes:exec*JF c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*XT c_date_array 0         Rpublicpostgres_array_typesbdatez c_date_arrayИ*rn -c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesе +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ2 Array types : query.sqlBpostgres_array_typesе ТSELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1GetPostgresArrayTypes:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ c_boolean_array 0         Rpostgres_array_typesb @@ -10753,4 +10735,48 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: 
query.sql"v1.27.0*т{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ +л +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle2 Geometric types : query.sqlBpostgres_geometric_typesж +кINSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         
R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц +hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B +c_point0         Rpostgres_geometric_typesbpointzc_point"? +c_line0         Rpostgres_geometric_typesblinezc_line"? +c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< +c_box0         Rpostgres_geometric_typesbboxzc_box"? +c_path0         Rpostgres_geometric_typesbpathzc_path"H + c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E +c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlU +'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql"v1.27.0*т{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index 609dc1e9..7226b913 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -10,6 +10,7 @@ namespace NpgsqlExampleGen; public readonly record struct Author(long Id, string 
Name, string? Bio); public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid); -public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); -public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); \ No newline at end of file +public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid); +public readonly record struct PostgresUnstructuredType(JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride); +public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? 
CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); +public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); \ No newline at end of file diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index e7fea5c1..db0e1a60 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -654,8 +654,8 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? 
CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -685,12 +685,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? 
args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -732,12 +726,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -790,8 +778,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? 
CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -827,21 +815,10 @@ public async Task InsertPostgresTypesBatch(List ar CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CJson = reader.IsDBNull(21) ? null : JsonSerializer.Deserialize(reader.GetString(21)), - CJsonStringOverride = reader.IsDBNull(22) ? null : reader.GetString(22), - CJsonb = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonpath = reader.IsDBNull(24) ? null : reader.GetString(24), - CXml = reader.IsDBNull(25) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 25), - CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CCidr = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), - CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) + CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -887,21 +864,10 @@ public async Task InsertPostgresTypesBatch(List ar CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CJson = reader.IsDBNull(21) ? 
null : JsonSerializer.Deserialize(reader.GetString(21)), - CJsonStringOverride = reader.IsDBNull(22) ? null : reader.GetString(22), - CJsonb = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonpath = reader.IsDBNull(24) ? null : reader.GetString(24), - CXml = reader.IsDBNull(25) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 25), - CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CCidr = reader.IsDBNull(27) ? null : reader.GetFieldValue(27), - CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), - CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) + CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -1060,23 +1026,15 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string InsertPostgresGeoTypesSql = "INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public readonly record struct InsertPostgresGeoTypesArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1091,70 +1049,81 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = TruncatePostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresGeoTypesBatchArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task InsertPostgresGeoTypesBatch(List args) + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? 
(object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - await writer.CompleteAsync(); } - await connection.CloseAsync(); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresUnstructuredTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task GetPostgresGeoTypes() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresUnstructuredTypesRow { - CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? 
null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) }; } } @@ -1171,21 +1140,25 @@ public async Task InsertPostgresGeoTypesBatch(List(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) }; } } @@ -1194,43 +1167,14 @@ public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); + public async Task GetPostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresGeoTypesRow + { + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresGeoTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresGeoTypesRow + { + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresGeoTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } \ No newline at end of file diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index a626ea49..a0a6f84d 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlExample", @@ -334,66 +336,6 @@ "name": "text" } }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - } - }, - { 
- "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, { "name": "c_cidr", "length": -1, @@ -448,77 +390,67 @@ }, { "rel": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - } - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" } }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" } }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" } }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" } }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" } }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" } } ] @@ -615,6 +547,83 @@ "arrayDims": 1 } ] + }, + { + "rel": { + "name": 
"postgres_geometric_types" + }, + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + } + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + } + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + } + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + } + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + } + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + } + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + } + } + ] } ] }, @@ -33217,7 +33226,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n 
c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33559,66 +33568,6 @@ }, { "number": 22, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 23, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 24, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 25, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 26, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 27, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33634,7 +33583,7 @@ } }, { - "number": 29, + "number": 23, "column": { "name": "c_inet", "length": -1, @@ -33650,7 +33599,7 @@ } }, { - "number": 30, + "number": 24, "column": { "name": "c_macaddr", "length": -1, @@ -33660,7 +33609,7 @@ } }, { - "number": 31, + "number": 25, "column": { "name": "c_macaddr8", "length": -1, @@ -33670,6 +33619,9 @@ } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_types" @@ -34032,7 +33984,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n 
c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34283,72 +34235,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - }, - "originalName": "c_jsonb" - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - }, - "originalName": "c_jsonpath" - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" - }, { "name": "c_cidr", "length": -1, @@ -34714,336 +34600,167 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": 
"InsertPostgresGeoTypes", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_point", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - } - }, - { - "number": 2, - "column": { - "name": "c_line", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - } - }, - { - "number": 3, - "column": { - "name": "c_lseg", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - } - }, - { - "number": 4, - "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_geometric_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": 
":copyfrom", + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "point" - }, - "originalName": "c_point" + "name": "json" + } } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "line" - }, - "originalName": "c_line" + "name": "json" + } } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_jsonb", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "lseg" - }, - "originalName": "c_lseg" + "name": "jsonb" + } } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_jsonpath", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "box" - }, - "originalName": "c_box" + "name": "jsonpath" + } } }, { "number": 5, "column": { - "name": "c_path", + "name": "c_xml", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "path" - }, - "originalName": "c_path" + "name": "xml" + } } }, { "number": 6, "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": 
"circle" - }, - "originalName": "c_circle" + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" }, - "originalName": "c_line" + "originalName": "c_json" }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" }, - "originalName": "c_lseg" + "originalName": "c_json_string_override" }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" }, - "originalName": "c_box" + "originalName": "c_jsonb" }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" }, - "originalName": "c_path" + "originalName": "c_jsonpath" }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - 
"name": "polygon" + "name": "xml" }, - "originalName": "c_polygon" + "originalName": "c_xml" }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" }, - "originalName": "c_circle" + "originalName": "c_xml_string_override" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", "cmd": ":exec", "parameters": [ @@ -35165,6 +34882,9 @@ } } ], + "comments": [ + " Array types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_array_types" @@ -35330,6 +35050,332 @@ "name": "TruncatePostgresArrayTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + 
{ + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + 
"table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": 
"c_lseg" + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 98f46559..755f93db 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -1,9 +1,9 @@ -░ +Щ 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlb╤ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb╤ 
examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunnerцц public"║publicГ +./dist/LocalRunner╓ч public"кpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextб + description0         Rbooksbtextс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -53,29 +53,22 @@ pg_catalogvarcharI pg_catalogvarchar1 c_bpchar0         Rpostgres_typesbbpchar- c_text0         Rpostgres_typesbtext- -c_json0         Rpostgres_typesbjson= -c_json_string_override0         Rpostgres_typesbjson/ -c_jsonb0         Rpostgres_typesbjsonb5 - -c_jsonpath0         Rpostgres_typesb -jsonpath+ -c_xml0         Rpostgres_typesbxml; -c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuid╡ -postgres_geometric_types9 -c_point0         Rpostgres_geometric_typesbpoint7 -c_line0         Rpostgres_geometric_typesbline7 -c_lseg0         Rpostgres_geometric_typesblseg5 -c_box0         Rpostgres_geometric_typesbbox7 
-c_path0         Rpostgres_geometric_typesbpath= - c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleХ +c_uuid0         Rpostgres_typesbuuidн +postgres_unstructured_types: +c_json0         Rpostgres_unstructured_typesbjsonJ +c_json_string_override0         Rpostgres_unstructured_typesbjson< +c_jsonb0         Rpostgres_unstructured_typesbjsonbB + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpath8 +c_xml0         Rpostgres_unstructured_typesbxmlH +c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -87,7 +80,15 @@ pg_catalogint4 pg_catalognumericИ> c_date_array 0         Rpostgres_array_typesbdateИT c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampИ" pg_temp"ц▓ +pg_catalog timestampИ╡ +postgres_geometric_types9 +c_point0         Rpostgres_geometric_typesbpoint7 +c_line0         Rpostgres_geometric_typesbline7 +c_lseg0         Rpostgres_geometric_typesblseg5 +c_box0         Rpostgres_geometric_typesbbox7 +c_path0         Rpostgres_geometric_typesbpath= + c_polygon0         Rpostgres_geometric_typesb polygon; +c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ pg_catalogЙ & @@ -10299,8 +10300,9 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlр -╒INSERT INTO postgres_types +name0         Rbooksbtextzname: query.sql║ +М +INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10323,12 +10325,6 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10356,16 +10352,10 @@ VALUES ( $19, $20, $21, - $22::json, - $23::json, - $24::jsonb, - $25::jsonpath, - $26::xml, 
- $27::xml, - $28, - $29, - $30::macaddr, - $31::macaddr8 + $22, + $23, + $24::macaddr, + $25::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10392,21 +10382,13 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid* -c_json0         bjson*/+ -c_json_string_override0         bjson*! -c_jsonb0         bjsonb*'# - -c_jsonpath0         b -jsonpath* -c_xml0         bxml*-) -c_xml_string_override0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr8: query.sqlBpostgres_typesЗ +macaddr82 Basic types : query.sqlBpostgres_typesЗ ▄INSERT INTO postgres_types ( c_boolean, @@ -10485,8 +10467,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ -ЯSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ +┤SELECT c_boolean, c_bit, c_smallint, @@ -10508,12 +10490,6 @@ c_interval*NJ c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10561,15 +10537,6 @@ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_json0         Rpostgres_typesbjsonzc_json"U -c_json_string_override0         Rpostgres_typesbjsonzc_json_string_override"8 -c_jsonb0         Rpostgres_typesbjsonbzc_jsonb"A - -c_jsonpath0         Rpostgres_typesb -jsonpathz -c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"R -c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10679,40 +10646,55 @@ FROM postgres_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sql  -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle -) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesИ -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +∙ +INSERT INTO postgres_unstructured_types +( + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override ) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц -hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B -c_point0         Rpostgres_geometric_typesbpointzc_point"? 
-c_line0         Rpostgres_geometric_typesblinezc_line"? -c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< -c_box0         Rpostgres_geometric_typesbboxzc_box"? -c_path0         Rpostgres_geometric_typesbpathzc_path"H - c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E -c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlU -'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql═ -╬INSERT INTO postgres_array_types +VALUES ( + $1::json, + $2::json, + $3::jsonb, + $4::jsonpath, + $5::xml, + $6::xml +)InsertPostgresUnstructuredTypes:exec* +c_json0         bjson*/+ +c_json_string_override0         bjson*! +c_jsonb0         bjsonb*'# + +c_jsonpath0         b +jsonpath* +c_xml0         bxml*-) +c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн +ЪSELECT + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override +FROM postgres_unstructured_types +LIMIT 1GetPostgresUnstructuredTypes:one"B +c_json0         Rpostgres_unstructured_typesbjsonzc_json"b +c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E +c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpathz +c_jsonpath"? 
+c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ +c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla +*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +╧ +INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, @@ -10729,7 +10711,7 @@ VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresArrayTypes:exec*JF c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*XT c_date_array 0         Rpublicpostgres_array_typesbdatez c_date_arrayИ*rn -c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesе +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ2 Array types : query.sqlBpostgres_array_typesе ТSELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1GetPostgresArrayTypes:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ c_boolean_array 0         Rpostgres_array_typesb @@ -10753,4 +10735,48 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: 
query.sql"v1.27.0*╧{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ +л +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle2 Geometric types : query.sqlBpostgres_geometric_typesж +кINSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         
R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц +hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B +c_point0         Rpostgres_geometric_typesbpointzc_point"? +c_line0         Rpostgres_geometric_typesblinezc_line"? +c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< +c_box0         Rpostgres_geometric_typesbboxzc_box"? +c_path0         Rpostgres_geometric_typesbpathzc_path"H + c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E +c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlU +'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql"v1.27.0*╧{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 1ddd96b2..a893b4ec 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -44,27 +44,20 @@ public class PostgresType public string CCharacterVarying { get; set; } 
public string CBpchar { get; set; } public string CText { get; set; } - public JsonElement? CJson { get; set; } - public JsonElement? CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public XmlDocument CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } }; - public class PostgresGeometricType + public class PostgresUnstructuredType { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public JsonElement? CJson { get; set; } + public JsonElement? CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } }; public class PostgresArrayType { @@ -76,4 +69,14 @@ public class PostgresArrayType public DateTime[] CDateArray { get; set; } public DateTime[] CTimestampArray { get; set; } }; + public class PostgresGeometricType + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; } \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index a72f22d8..2934d54c 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -752,7 +752,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } } - private const string InsertPostgresTypesSql = "INSERT INTO postgres_types(c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; public class 
InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -776,12 +776,6 @@ public class InsertPostgresTypesArgs public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -816,12 +810,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? 
(object)DBNull.Value); @@ -863,12 +851,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? 
(object)DBNull.Value); @@ -946,7 +928,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -970,12 +952,6 @@ public class GetPostgresTypesRow public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -1016,21 +992,10 @@ public async Task GetPostgresTypes() CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CJson = reader.IsDBNull(21) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(21)), - CJsonStringOverride = reader.IsDBNull(22) ? 
null : reader.GetString(22), - CJsonb = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonpath = reader.IsDBNull(24) ? null : reader.GetString(24), - CXml = reader.IsDBNull(25) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 25), - CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CCidr = reader.IsDBNull(27) ? (NpgsqlCidr? )null : reader.GetFieldValue(27), - CInet = reader.IsDBNull(28) ? null : reader.GetFieldValue(28), - CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), - CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) + CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -1076,21 +1041,10 @@ public async Task GetPostgresTypes() CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CJson = reader.IsDBNull(21) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(21)), - CJsonStringOverride = reader.IsDBNull(22) ? null : reader.GetString(22), - CJsonb = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonpath = reader.IsDBNull(24) ? null : reader.GetString(24), - CXml = reader.IsDBNull(25) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 25), - CXmlStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CCidr = reader.IsDBNull(27) ? (NpgsqlCidr? )null : reader.GetFieldValue(27), - CInet = reader.IsDBNull(28) ? 
null : reader.GetFieldValue(28), - CMacaddr = reader.IsDBNull(29) ? null : reader.GetFieldValue(29), - CMacaddr8 = reader.IsDBNull(30) ? null : reader.GetString(30) + CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -1280,32 +1234,15 @@ public async Task GetPostgresFunctions() return null; } - private const string InsertPostgresGeoTypesSql = "INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class InsertPostgresGeoTypesArgs - { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } - }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1320,88 +1257,97 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = TruncatePostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? 
CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task InsertPostgresGeoTypesBatch(List args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? 
args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - await writer.CompleteAsync(); } - await connection.CloseAsync(); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresUnstructuredTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? 
CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresUnstructuredTypesRow { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? 
null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) }; } } @@ -1418,21 +1364,25 @@ public async Task GetPostgresGeoTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresGeoTypesSql; + command.CommandText = GetPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresUnstructuredTypesRow { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? 
null : reader.GetString(5) }; } } @@ -1441,14 +1391,14 @@ public async Task GetPostgresGeoTypes() return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -1464,42 +1414,13 @@ public async Task TruncatePostgresTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresTypesSql; + command.CommandText = TruncatePostgresUnstructuredTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncatePostgresGeoTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresArrayTypesSql = "INSERT INTO postgres_array_types(c_bytea, 
c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; public class InsertPostgresArrayTypesArgs { public byte[] CBytea { get; set; } @@ -1732,5 +1653,195 @@ public async Task TruncatePostgresArrayTypes() await command.ExecuteNonQueryAsync(); } } + + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } + }; + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + { + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresGeoTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } + }; + public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } + }; + public async Task GetPostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresGeoTypesRow + { + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? 
)null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresGeoTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresGeoTypesRow + { + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? 
)null : reader.GetFieldValue(6) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresGeoTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } } \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index c4a512ca..e5a51dac 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlLegacyExample", @@ -334,66 +336,6 @@ "name": "text" } }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": 
{ - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - } - }, { "name": "c_cidr", "length": -1, @@ -448,77 +390,67 @@ }, { "rel": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - } - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" } }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" } }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" } }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" } }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "polygon" + "name": "xml" } }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" } } ] @@ -615,6 +547,83 @@ "arrayDims": 1 } ] + }, + { + "rel": { 
+ "name": "postgres_geometric_types" + }, + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + } + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + } + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + } + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + } + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + } + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + } + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + } + } + ] } ] }, @@ -33217,7 +33226,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::json, \n $23::json, \n $24::jsonb,\n $25::jsonpath,\n $26::xml,\n $27::xml,\n $28,\n $29,\n $30::macaddr,\n $31::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n 
c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -33559,66 +33568,6 @@ }, { "number": 22, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 23, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 24, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 25, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 26, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 27, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 28, "column": { "name": "c_cidr", "length": -1, @@ -33634,7 +33583,7 @@ } }, { - "number": 29, + "number": 23, "column": { "name": "c_inet", "length": -1, @@ -33650,7 +33599,7 @@ } }, { - "number": 30, + "number": 24, "column": { "name": "c_macaddr", "length": -1, @@ -33660,7 +33609,7 @@ } }, { - "number": 31, + "number": 25, "column": { "name": "c_macaddr8", "length": -1, @@ -33670,6 +33619,9 @@ } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_types" @@ -34032,7 +33984,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n 
c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -34283,72 +34235,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonb" - }, - "originalName": "c_jsonb" - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "jsonpath" - }, - "originalName": "c_jsonpath" - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" - }, { "name": "c_cidr", "length": -1, @@ -34714,336 +34600,167 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": 
"InsertPostgresGeoTypes", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_point", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - } - }, - { - "number": 2, - "column": { - "name": "c_line", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - } - }, - { - "number": 3, - "column": { - "name": "c_lseg", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - } - }, - { - "number": 4, - "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_geometric_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": 
":copyfrom", + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "point" - }, - "originalName": "c_point" + "name": "json" + } } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "line" - }, - "originalName": "c_line" + "name": "json" + } } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_jsonb", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "lseg" - }, - "originalName": "c_lseg" + "name": "jsonb" + } } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_jsonpath", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "box" - }, - "originalName": "c_box" + "name": "jsonpath" + } } }, { "number": 5, "column": { - "name": "c_path", + "name": "c_xml", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "path" - }, - "originalName": "c_path" + "name": "xml" + } } }, { "number": 6, "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": 
"circle" - }, - "originalName": "c_circle" + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", + "name": "c_json", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "line" + "name": "json" }, - "originalName": "c_line" + "originalName": "c_json" }, { - "name": "c_lseg", + "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "lseg" + "name": "json" }, - "originalName": "c_lseg" + "originalName": "c_json_string_override" }, { - "name": "c_box", + "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "box" + "name": "jsonb" }, - "originalName": "c_box" + "originalName": "c_jsonb" }, { - "name": "c_path", + "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "path" + "name": "jsonpath" }, - "originalName": "c_path" + "originalName": "c_jsonpath" }, { - "name": "c_polygon", + "name": "c_xml", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - 
"name": "polygon" + "name": "xml" }, - "originalName": "c_polygon" + "originalName": "c_xml" }, { - "name": "c_circle", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_geometric_types" + "name": "postgres_unstructured_types" }, "type": { - "name": "circle" + "name": "xml" }, - "originalName": "c_circle" + "originalName": "c_xml_string_override" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", "cmd": ":exec", "parameters": [ @@ -35165,6 +34882,9 @@ } } ], + "comments": [ + " Array types " + ], "filename": "query.sql", "insert_into_table": { "name": "postgres_array_types" @@ -35330,6 +35050,332 @@ "name": "TruncatePostgresArrayTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + 
{ + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } + }, + { + "number": 2, + "column": { + "name": "c_line", + "length": -1, + 
"table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } + }, + { + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" + } + }, + { + "number": 4, + "column": { + "name": "c_box", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + } + }, + { + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + } + }, + { + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + } + }, + { + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } + }, + { + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_point", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + }, + { + "name": "c_line", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + }, + { + "name": "c_lseg", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": 
"c_lseg" + }, + { + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index 32727fcf..bdb24833 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -1,9 +1,9 @@ -─ +н 2 -postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/query.sqlbх +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbх 
examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunnerцц public"║publicГ +./dist/LocalRunner╓ч public"кpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,7 +13,7 @@ postgresql%examples/config/postgresql/schema.sql"$examples/config/postgresql/qu name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextб + description0         Rbooksbtextс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -53,29 +53,22 @@ pg_catalogvarcharI pg_catalogvarchar1 c_bpchar0         Rpostgres_typesbbpchar- c_text0         Rpostgres_typesbtext- -c_json0         Rpostgres_typesbjson= -c_json_string_override0         Rpostgres_typesbjson/ -c_jsonb0         Rpostgres_typesbjsonb5 - -c_jsonpath0         Rpostgres_typesb -jsonpath+ -c_xml0         Rpostgres_typesbxml; -c_xml_string_override0         Rpostgres_typesbxml- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuid╡ -postgres_geometric_types9 -c_point0         Rpostgres_geometric_typesbpoint7 -c_line0         Rpostgres_geometric_typesbline7 -c_lseg0         Rpostgres_geometric_typesblseg5 -c_box0         
Rpostgres_geometric_typesbbox7 -c_path0         Rpostgres_geometric_typesbpath= - c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleХ +c_uuid0         Rpostgres_typesbuuidн +postgres_unstructured_types: +c_json0         Rpostgres_unstructured_typesbjsonJ +c_json_string_override0         Rpostgres_unstructured_typesbjson< +c_jsonb0         Rpostgres_unstructured_typesbjsonbB + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpath8 +c_xml0         Rpostgres_unstructured_typesbxmlH +c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -87,7 +80,15 @@ pg_catalogint4 pg_catalognumericИ> c_date_array 0         Rpostgres_array_typesbdateИT c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampИ" pg_temp"ц▓ +pg_catalog timestampИ╡ +postgres_geometric_types9 +c_point0         Rpostgres_geometric_typesbpoint7 +c_line0         Rpostgres_geometric_typesbline7 +c_lseg0         Rpostgres_geometric_typesblseg5 +c_box0         Rpostgres_geometric_typesbbox7 +c_path0         Rpostgres_geometric_typesbpath= + c_polygon0         Rpostgres_geometric_typesb polygon; +c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ pg_catalogЙ & @@ -10299,8 +10300,9 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sqlр -╒INSERT INTO postgres_types +name0         Rbooksbtextzname: query.sql║ +М +INSERT INTO postgres_types ( c_boolean, c_bit, @@ -10323,12 +10325,6 @@ WHERE books.name = $1GetAuthorsByBookName:many"- c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10356,16 +10352,10 @@ VALUES ( $19, $20, $21, - $22::json, - $23::json, - 
$24::jsonb, - $25::jsonpath, - $26::xml, - $27::xml, - $28, - $29, - $30::macaddr, - $31::macaddr8 + $22, + $23, + $24::macaddr, + $25::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10392,21 +10382,13 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid* -c_json0         bjson*/+ -c_json_string_override0         bjson*! -c_jsonb0         bjsonb*'# - -c_jsonpath0         b -jsonpath* -c_xml0         bxml*-) -c_xml_string_override0         bxml*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr8: query.sqlBpostgres_typesЗ +macaddr82 Basic types : query.sqlBpostgres_typesЗ ▄INSERT INTO postgres_types ( c_boolean, @@ -10485,8 +10467,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЬ -ЯSELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ +┤SELECT c_boolean, c_bit, c_smallint, @@ -10508,12 +10490,6 @@ c_interval*NJ c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -10561,15 +10537,6 @@ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_json0         Rpostgres_typesbjsonzc_json"U -c_json_string_override0         Rpostgres_typesbjsonzc_json_string_override"8 -c_jsonb0         Rpostgres_typesbjsonbzc_jsonb"A - -c_jsonpath0         Rpostgres_typesb -jsonpathz -c_jsonpath"2 -c_xml0         Rpostgres_typesbxmlzc_xml"R -c_xml_string_override0         Rpostgres_typesbxmlzc_xml_string_override"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10679,40 +10646,55 @@ FROM postgres_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sql  -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle -) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesИ -МINSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +∙ +INSERT INTO postgres_unstructured_types +( + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override ) -VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ -c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG -c_line0         R"publicpostgres_geometric_typesblinezc_line*KG -c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD -c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG -c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP - c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM -c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц -hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B -c_point0         Rpostgres_geometric_typesbpointzc_point"? 
-c_line0         Rpostgres_geometric_typesblinezc_line"? -c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< -c_box0         Rpostgres_geometric_typesbboxzc_box"? -c_path0         Rpostgres_geometric_typesbpathzc_path"H - c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E -c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlU -'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql═ -╬INSERT INTO postgres_array_types +VALUES ( + $1::json, + $2::json, + $3::jsonb, + $4::jsonpath, + $5::xml, + $6::xml +)InsertPostgresUnstructuredTypes:exec* +c_json0         bjson*/+ +c_json_string_override0         bjson*! +c_jsonb0         bjsonb*'# + +c_jsonpath0         b +jsonpath* +c_xml0         bxml*-) +c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн +ЪSELECT + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override +FROM postgres_unstructured_types +LIMIT 1GetPostgresUnstructuredTypes:one"B +c_json0         Rpostgres_unstructured_typesbjsonzc_json"b +c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E +c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N + +c_jsonpath0         Rpostgres_unstructured_typesb +jsonpathz +c_jsonpath"? 
+c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ +c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla +*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +╧ +INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, @@ -10729,7 +10711,7 @@ VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresArrayTypes:exec*JF c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*XT c_date_array 0         Rpublicpostgres_array_typesbdatez c_date_arrayИ*rn -c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesе +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ2 Array types : query.sqlBpostgres_array_typesе ТSELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1GetPostgresArrayTypes:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ c_boolean_array 0         Rpostgres_array_typesb @@ -10753,4 +10735,48 @@ GROUP BY LIMIT 1GetPostgresArrayTypesCnt:one"> c_bytea0         Rpostgres_array_typesbbyteazc_bytea" cnt0         @bbigint: query.sqlS -#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: 
query.sql"v1.27.0*▌{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +#TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ +л +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypes:exec*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle2 Geometric types : query.sqlBpostgres_geometric_typesж +кINSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7)InsertPostgresGeoTypesBatch :copyfrom*NJ +c_point0         R"publicpostgres_geometric_typesbpointzc_point*KG +c_line0         R"publicpostgres_geometric_typesblinezc_line*KG +c_lseg0         R"publicpostgres_geometric_typesblsegzc_lseg*HD +c_box0         
R"publicpostgres_geometric_typesbboxzc_box*KG +c_path0         R"publicpostgres_geometric_typesbpathzc_path*TP + c_polygon0         R"publicpostgres_geometric_typesb polygonz c_polygon*QM +c_circle0         R"publicpostgres_geometric_typesbcirclezc_circle: query.sqlBpostgres_geometric_typesц +hSELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1GetPostgresGeoTypes:one"B +c_point0         Rpostgres_geometric_typesbpointzc_point"? +c_line0         Rpostgres_geometric_typesblinezc_line"? +c_lseg0         Rpostgres_geometric_typesblsegzc_lseg"< +c_box0         Rpostgres_geometric_typesbboxzc_box"? +c_path0         Rpostgres_geometric_typesbpathzc_path"H + c_polygon0         Rpostgres_geometric_typesb polygonz c_polygon"E +c_circle0         Rpostgres_geometric_typesbcirclezc_circle: query.sqlU +'TRUNCATE TABLE postgres_geometric_typesTruncatePostgresGeoTypes:exec: query.sql"v1.27.0*▌{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/config/postgresql/Dockerfile b/examples/config/postgresql/Dockerfile index b9546fbc..75eab979 100644 --- a/examples/config/postgresql/Dockerfile +++ b/examples/config/postgresql/Dockerfile @@ -1,2 +1,6 @@ FROM postgres:16.2 -COPY schema.sql /docker-entrypoint-initdb.d \ 
No newline at end of file + +COPY types/schema.sql types_schema.sql +COPY authors/schema.sql authors_schema.sql + +RUN (cat types_schema.sql && echo && cat authors_schema.sql) > /docker-entrypoint-initdb.d/schema.sql diff --git a/examples/config/postgresql/authors/query.sql b/examples/config/postgresql/authors/query.sql new file mode 100644 index 00000000..32521d57 --- /dev/null +++ b/examples/config/postgresql/authors/query.sql @@ -0,0 +1,71 @@ +-- name: GetAuthor :one +SELECT * FROM authors +WHERE name = $1 LIMIT 1; + +-- name: ListAuthors :many +SELECT * +FROM authors +ORDER BY name +LIMIT sqlc.arg('limit') +OFFSET sqlc.arg('offset'); + +-- name: CreateAuthor :one +INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING *; + +-- name: CreateAuthorReturnId :execlastid +INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id; + +-- name: GetAuthorById :one +SELECT * FROM authors +WHERE id = $1 LIMIT 1; + +-- name: GetAuthorByNamePattern :many +SELECT * FROM authors +WHERE name LIKE COALESCE(sqlc.narg('name_pattern'), '%'); + +-- name: DeleteAuthor :exec +DELETE FROM authors +WHERE name = $1; + +-- name: TruncateAuthors :exec +TRUNCATE TABLE authors CASCADE; + +-- name: UpdateAuthors :execrows +UPDATE authors +SET bio = $1 +WHERE bio IS NOT NULL; + +-- name: GetAuthorsByIds :many +SELECT * FROM authors +WHERE id = ANY($1::BIGINT []); + +-- name: GetAuthorsByIdsAndNames :many +SELECT * +FROM authors +WHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT []);; + +-- name: CreateBook :execlastid +INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id; + +-- name: ListAllAuthorsBooks :many +SELECT + sqlc.embed(authors), + sqlc.embed(books) +FROM authors +INNER JOIN books ON authors.id = books.author_id +ORDER BY authors.name; + +-- name: GetDuplicateAuthors :many +SELECT + sqlc.embed(authors1), + sqlc.embed(authors2) +FROM authors AS authors1 +INNER JOIN authors AS authors2 ON authors1.name = authors2.name +WHERE authors1.id < authors2.id; + 
+-- name: GetAuthorsByBookName :many +SELECT + authors.*, + sqlc.embed(books) +FROM authors INNER JOIN books ON authors.id = books.author_id +WHERE books.name = $1; diff --git a/examples/config/postgresql/authors/schema.sql b/examples/config/postgresql/authors/schema.sql new file mode 100644 index 00000000..24dbe2b6 --- /dev/null +++ b/examples/config/postgresql/authors/schema.sql @@ -0,0 +1,13 @@ +CREATE TABLE authors ( + id BIGSERIAL PRIMARY KEY, + name TEXT NOT NULL, + bio TEXT +); + +CREATE TABLE books ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + name TEXT NOT NULL, + author_id BIGINT NOT NULL, + description TEXT, + FOREIGN KEY (author_id) REFERENCES authors (id) ON DELETE CASCADE +); diff --git a/examples/config/postgresql/query.sql b/examples/config/postgresql/types/query.sql similarity index 71% rename from examples/config/postgresql/query.sql rename to examples/config/postgresql/types/query.sql index 967a1252..f16fcb31 100644 --- a/examples/config/postgresql/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -1,74 +1,4 @@ --- name: GetAuthor :one -SELECT * FROM authors -WHERE name = $1 LIMIT 1; - --- name: ListAuthors :many -SELECT * -FROM authors -ORDER BY name -LIMIT sqlc.arg('limit') -OFFSET sqlc.arg('offset'); - --- name: CreateAuthor :one -INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING *; - --- name: CreateAuthorReturnId :execlastid -INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id; - --- name: GetAuthorById :one -SELECT * FROM authors -WHERE id = $1 LIMIT 1; - --- name: GetAuthorByNamePattern :many -SELECT * FROM authors -WHERE name LIKE COALESCE(sqlc.narg('name_pattern'), '%'); - --- name: DeleteAuthor :exec -DELETE FROM authors -WHERE name = $1; - --- name: TruncateAuthors :exec -TRUNCATE TABLE authors CASCADE; - --- name: UpdateAuthors :execrows -UPDATE authors -SET bio = $1 -WHERE bio IS NOT NULL; - --- name: GetAuthorsByIds :many -SELECT * FROM authors -WHERE id = ANY($1::BIGINT []); - --- name: 
GetAuthorsByIdsAndNames :many -SELECT * -FROM authors -WHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT []);; - --- name: CreateBook :execlastid -INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id; - --- name: ListAllAuthorsBooks :many -SELECT - sqlc.embed(authors), - sqlc.embed(books) -FROM authors -INNER JOIN books ON authors.id = books.author_id -ORDER BY authors.name; - --- name: GetDuplicateAuthors :many -SELECT - sqlc.embed(authors1), - sqlc.embed(authors2) -FROM authors AS authors1 -INNER JOIN authors AS authors2 ON authors1.name = authors2.name -WHERE authors1.id < authors2.id; - --- name: GetAuthorsByBookName :many -SELECT - authors.*, - sqlc.embed(books) -FROM authors INNER JOIN books ON authors.id = books.author_id -WHERE books.name = $1; +/* Basic types */ -- name: InsertPostgresTypes :exec INSERT INTO postgres_types @@ -94,12 +24,6 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -127,12 +51,6 @@ VALUES ( sqlc.narg('c_bpchar'), sqlc.narg('c_text'), sqlc.narg('c_uuid'), - sqlc.narg('c_json')::json, - sqlc.narg('c_json_string_override')::json, - sqlc.narg('c_jsonb')::jsonb, - sqlc.narg('c_jsonpath')::jsonpath, - sqlc.narg('c_xml')::xml, - sqlc.narg('c_xml_string_override')::xml, sqlc.narg('c_cidr'), sqlc.narg('c_inet'), sqlc.narg('c_macaddr')::macaddr, @@ -215,12 +133,6 @@ SELECT c_bpchar, c_text, c_uuid, - c_json, - c_json_string_override, - c_jsonb, - c_jsonpath, - c_xml, - c_xml_string_override, c_cidr, c_inet, c_macaddr, @@ -288,26 +200,45 @@ SELECT MAX(c_timestamp) AS max_timestamp FROM postgres_types; --- name: InsertPostgresGeoTypes :exec -INSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle -) -VALUES ($1, $2, $3, $4, $5, $6, $7); +-- name: TruncatePostgresTypes :exec +TRUNCATE TABLE postgres_types; --- name: InsertPostgresGeoTypesBatch :copyfrom 
-INSERT INTO postgres_geometric_types ( - c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle +/* Unstructured types */ + +-- name: InsertPostgresUnstructuredTypes :exec +INSERT INTO postgres_unstructured_types +( + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override ) -VALUES ($1, $2, $3, $4, $5, $6, $7); +VALUES ( + sqlc.narg('c_json')::json, + sqlc.narg('c_json_string_override')::json, + sqlc.narg('c_jsonb')::jsonb, + sqlc.narg('c_jsonpath')::jsonpath, + sqlc.narg('c_xml')::xml, + sqlc.narg('c_xml_string_override')::xml +); --- name: GetPostgresGeoTypes :one -SELECT * FROM postgres_geometric_types LIMIT 1; +-- name: GetPostgresUnstructuredTypes :one +SELECT + c_json, + c_json_string_override, + c_jsonb, + c_jsonpath, + c_xml, + c_xml_string_override +FROM postgres_unstructured_types +LIMIT 1; --- name: TruncatePostgresTypes :exec -TRUNCATE TABLE postgres_types; +-- name: TruncatePostgresUnstructuredTypes :exec +TRUNCATE TABLE postgres_unstructured_types; --- name: TruncatePostgresGeoTypes :exec -TRUNCATE TABLE postgres_geometric_types; +/* Array types */ -- name: InsertPostgresArrayTypes :exec INSERT INTO postgres_array_types @@ -338,4 +269,37 @@ GROUP BY LIMIT 1; -- name: TruncatePostgresArrayTypes :exec -TRUNCATE TABLE postgres_array_types; \ No newline at end of file +TRUNCATE TABLE postgres_array_types; + + +/* Geometric types */ + +-- name: InsertPostgresGeoTypes :exec +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7); + +-- name: InsertPostgresGeoTypesBatch :copyfrom +INSERT INTO postgres_geometric_types ( + c_point, + c_line, + c_lseg, + c_box, + c_path, + c_polygon, + c_circle +) +VALUES ($1, $2, $3, $4, $5, $6, $7); + +-- name: GetPostgresGeoTypes :one +SELECT * FROM postgres_geometric_types LIMIT 1; + +-- name: TruncatePostgresGeoTypes :exec +TRUNCATE TABLE postgres_geometric_types; diff --git 
a/examples/config/postgresql/schema.sql b/examples/config/postgresql/types/schema.sql similarity index 79% rename from examples/config/postgresql/schema.sql rename to examples/config/postgresql/types/schema.sql index 178185d4..31b8b941 100644 --- a/examples/config/postgresql/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -1,19 +1,5 @@ -CREATE TABLE authors ( - id BIGSERIAL PRIMARY KEY, - name TEXT NOT NULL, - bio TEXT -); - CREATE EXTENSION "uuid-ossp"; -CREATE TABLE books ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - name TEXT NOT NULL, - author_id BIGINT NOT NULL, - description TEXT, - FOREIGN KEY (author_id) REFERENCES authors (id) ON DELETE CASCADE -); - CREATE TABLE postgres_types ( /* Numeric Data Types */ c_boolean BOOLEAN, @@ -41,14 +27,6 @@ CREATE TABLE postgres_types ( c_bpchar BPCHAR(100), c_text TEXT, - /* Unstructured Data Types */ - c_json JSON, - c_json_string_override JSON, - c_jsonb JSONB, - c_jsonpath JSONPATH, - c_xml XML, - c_xml_string_override XML, - /* Network Address Data Types */ c_cidr CIDR, c_inet INET, @@ -59,14 +37,13 @@ CREATE TABLE postgres_types ( c_uuid UUID ); -CREATE TABLE postgres_geometric_types ( - c_point POINT, - c_line LINE, - c_lseg LSEG, - c_box BOX, - c_path PATH, - c_polygon POLYGON, - c_circle CIRCLE +CREATE TABLE postgres_unstructured_types ( + c_json JSON, + c_json_string_override JSON, + c_jsonb JSONB, + c_jsonpath JSONPATH, + c_xml XML, + c_xml_string_override XML ); CREATE TABLE postgres_array_types ( @@ -77,4 +54,14 @@ CREATE TABLE postgres_array_types ( c_decimal_array DECIMAL(10, 7) [], c_date_array DATE [], c_timestamp_array TIMESTAMP [] +); + +CREATE TABLE postgres_geometric_types ( + c_point POINT, + c_line LINE, + c_lseg LSEG, + c_box BOX, + c_path PATH, + c_polygon POLYGON, + c_circle CIRCLE ); \ No newline at end of file diff --git a/scripts/sync_sqlc_options.sh b/scripts/sync_sqlc_options.sh index ac99e8a8..41daf5e9 100755 --- a/scripts/sync_sqlc_options.sh +++ 
b/scripts/sync_sqlc_options.sh @@ -14,11 +14,9 @@ cp "$LOCAL_YAML" "$TMP_LOCAL_YML" sql_count=$(yq '.sql | length' "$CI_YAML") for ((i=0; i Date: Thu, 14 Aug 2025 11:09:44 +0200 Subject: [PATCH 05/33] fix: postgres legacy end2end tests --- .github/workflows/legacy-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/legacy-tests.yml b/.github/workflows/legacy-tests.yml index d0a573f4..c6b3b02d 100644 --- a/.github/workflows/legacy-tests.yml +++ b/.github/workflows/legacy-tests.yml @@ -51,7 +51,7 @@ jobs: - name: Init PostgresSQL Schema shell: powershell - run: psql -U $Env:POSTGRES_USER -f 'examples/config/postgresql/schema.sql' + run: psql -U $Env:POSTGRES_USER -f 'examples/config/postgresql/types/schema.sql' -f 'examples/config/postgresql/authors/schema.sql' env: PGSERVICE: ${{ steps.postgres.outputs.service-name }} PGPASSWORD: ${{ env.POSTGRES_PASSWORD }} From 51c86823490757492e56750ccedfa1f4ee09d3b7 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 11:29:57 +0200 Subject: [PATCH 06/33] fix: split mysql authors and data types tables to different files --- .github/workflows/legacy-tests.yml | 2 +- .../MySqlConnectorDapperExample/Models.cs | 26 +- .../MySqlConnectorDapperExample/QuerySql.cs | 174 +- examples/MySqlConnectorDapperExample/Utils.cs | 22 +- .../MySqlConnectorDapperExample/request.json | 486 +-- .../request.message | Bin 21327 -> 21422 bytes .../Models.cs | 26 +- .../QuerySql.cs | 1140 +++--- .../request.json | 3074 +++++++++-------- .../request.message | Bin 21361 -> 21456 bytes examples/MySqlConnectorExample/Models.cs | 2 +- examples/MySqlConnectorExample/QuerySql.cs | 258 +- examples/MySqlConnectorExample/request.json | 486 +-- .../MySqlConnectorExample/request.message | Bin 21311 -> 21406 bytes .../MySqlConnectorLegacyExample/Models.cs | 26 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 1802 +++++----- .../MySqlConnectorLegacyExample/request.json | 3074 +++++++++-------- .../request.message | 
Bin 21345 -> 21440 bytes examples/NpgsqlDapperExample/Models.cs | 26 +- examples/NpgsqlDapperExample/QuerySql.cs | 1332 +++---- examples/NpgsqlDapperExample/request.json | 2164 ++++++------ examples/NpgsqlDapperExample/request.message | 212 +- examples/NpgsqlDapperLegacyExample/Models.cs | 26 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 1332 +++---- .../NpgsqlDapperLegacyExample/request.json | 2164 ++++++------ .../NpgsqlDapperLegacyExample/request.message | 212 +- examples/NpgsqlExample/Models.cs | 6 +- examples/NpgsqlExample/QuerySql.cs | 1618 ++++----- examples/NpgsqlExample/request.json | 2164 ++++++------ examples/NpgsqlExample/request.message | 212 +- examples/NpgsqlLegacyExample/Models.cs | 26 +- examples/NpgsqlLegacyExample/QuerySql.cs | 2158 ++++++------ examples/NpgsqlLegacyExample/request.json | 2164 ++++++------ examples/NpgsqlLegacyExample/request.message | 212 +- examples/config/mysql/Dockerfile | 6 +- examples/config/mysql/authors/query.sql | 66 + examples/config/mysql/authors/schema.sql | 23 + examples/config/mysql/query.sql | 108 - examples/config/mysql/types/query.sql | 150 + examples/config/mysql/{ => types}/schema.sql | 26 +- sqlc.ci.yaml | 32 +- sqlc.local.generated.yaml | 32 +- sqlc.request.generated.yaml | 32 +- 43 files changed, 13610 insertions(+), 13491 deletions(-) create mode 100644 examples/config/mysql/authors/query.sql create mode 100644 examples/config/mysql/authors/schema.sql delete mode 100644 examples/config/mysql/query.sql create mode 100644 examples/config/mysql/types/query.sql rename examples/config/mysql/{ => types}/schema.sql (70%) diff --git a/.github/workflows/legacy-tests.yml b/.github/workflows/legacy-tests.yml index c6b3b02d..132ee38c 100644 --- a/.github/workflows/legacy-tests.yml +++ b/.github/workflows/legacy-tests.yml @@ -101,7 +101,7 @@ jobs: $env:Path += ";C:\Program Files\MySQL\MySQL Server 8.0\bin" [Environment]::SetEnvironmentVariable("Path", $env:Path, "Machine") mysql -u root -e "SET GLOBAL local_infile=1; 
CREATE DATABASE $Env:TESTS_DB;" - mysql -u root $Env:TESTS_DB --execute="source examples/config/mysql/schema.sql" + mysql -u root $Env:TESTS_DB --execute="source examples/config/mysql/types/schema.sql; source examples/config/mysql/authors/schema.sql;" - name: Run Tests shell: powershell diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index db9ff9a9..a5e4cf22 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -5,19 +5,6 @@ using System.Text.Json; namespace MySqlConnectorDapperExampleGen; -public class Author -{ - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } -}; -public class Book -{ - public required long Id { get; init; } - public required string Name { get; init; } - public required long AuthorId { get; init; } - public string? Description { get; init; } -}; public class MysqlType { public bool? CBool { get; init; } @@ -60,6 +47,19 @@ public class MysqlType public byte[]? CMediumblob { get; init; } public byte[]? CLongblob { get; init; } }; +public class Author +{ + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } +}; +public class Book +{ + public required long Id { get; init; } + public required string Name { get; init; } + public required long AuthorId { get; init; } + public string? Description { get; init; } +}; public class ExtendedBio { public string? 
AuthorName { get; init; } diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index 9adde76a..16349bf0 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -528,6 +528,93 @@ public async Task> GetAuthorsByBookName(GetAuthors } } + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; + public class CreateExtendedBioArgs + { + public string? AuthorName { get; init; } + public string? Name { get; init; } + public ExtendedBiosBioType? BioType { get; init; } + public HashSet? AuthorType { get; init; } + }; + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType); + queryParams.Add("author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : null); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow + { + public string? AuthorName { get; init; } + public string? Name { get; init; } + public ExtendedBiosBioType? 
BioType { get; init; } + public HashSet? AuthorType { get; init; } + }; + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBiosBioType? BioType { get; init; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncateExtendedBiosSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + } + private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, 
c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; public class InsertMysqlTypesArgs { @@ -901,91 +988,4 @@ public async Task TruncateMysqlTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } - - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public class CreateExtendedBioArgs - { - public string? AuthorName { get; init; } - public string? Name { get; init; } - public ExtendedBiosBioType? BioType { get; init; } - public HashSet? AuthorType { get; init; } - }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("author_name", args.AuthorName); - queryParams.Add("name", args.Name); - queryParams.Add("bio_type", args.BioType); - queryParams.Add("author_type", args.AuthorType != null ? 
string.Join(",", args.AuthorType) : null); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); - } - - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow - { - public string? AuthorName { get; init; } - public string? Name { get; init; } - public ExtendedBiosBioType? BioType { get; init; } - public HashSet? AuthorType { get; init; } - }; - public class GetFirstExtendedBioByTypeArgs - { - public ExtendedBiosBioType? 
BioType { get; init; } - }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("bio_type", args.BioType); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); - } - - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.ExecuteAsync(TruncateExtendedBiosSql); - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); - } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperExample/Utils.cs b/examples/MySqlConnectorDapperExample/Utils.cs index 5d7b107b..a34ee9ea 100644 --- a/examples/MySqlConnectorDapperExample/Utils.cs +++ b/examples/MySqlConnectorDapperExample/Utils.cs @@ -29,8 +29,8 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new 
MysqlTypesCSetTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlTypesCSetTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -39,31 +39,31 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler> + private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlTypesCSetSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToExtendedBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } } - private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> + private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToExtendedBiosAuthorTypeSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToMysqlTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 
c9ba1e68..64fdd9a1 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/schema.sql" + "examples/config/mysql/types/schema.sql", + "examples/config/mysql/authors/schema.sql" ], "queries": [ - "examples/config/mysql/query.sql" + "examples/config/mysql/authors/query.sql", + "examples/config/mysql/types/query.sql" ], "codegen": { "out": "examples/MySqlConnectorDapperExample", @@ -23,95 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "mysql_types" @@ -508,6 +421,95 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + 
"name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ], "enums": [ @@ -1305,6 +1307,157 @@ ], "filename": "query.sql" }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } + }, + { + "number": 4, + "column": { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? 
LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + }, + { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, { "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", @@ -3379,157 +3532,6 @@ "name": "TruncateMysqlTypes", "cmd": ":exec", "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, 
?, ?, ?)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" - } - }, - { - "number": 2, - "column": { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - } - }, - { - "number": 3, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } - }, - { - "number": 4, - "column": { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_author_type" - }, - "originalName": "author_type" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "schema": "extended", - "name": "bios" - } - }, - { - "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? 
LIMIT 1", - "name": "GetFirstExtendedBioByType", - "cmd": ":one", - "columns": [ - { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" - }, - { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - }, - { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - }, - { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_author_type" - }, - "originalName": "author_type" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", - "cmd": ":exec", - "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 124f5e87f46195b43bba573dac76c6347d9f8f52..a46f273ae05a70e10a8a8b154e080414a4d64784 100644 GIT binary patch delta 134 zcmX@VjB(v^Mt-jMtXzynLae!!#f3RiY7-5`gi0z4Qj7JAlQU9t6ZJr%8bDEz#L|+C z{32v=C3T>YtG8r>&e#>=UU~{i;8}sJf N(iaUk3kTXT0RZOqF2Mi* delta 75 zcmZ3tobmiJMt-gdtXzynLae!!#f3Ri3KJ8>c#4xVQgajafP5u+Ab;bsWTwqhTt CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_bool", args.CBool); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_tinyint", args.CTinyint); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_mediumint", args.CMediumint); + queryParams.Add("c_int", args.CInt); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_dec", args.CDec); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_fixed", args.CFixed); + queryParams.Add("c_float", args.CFloat); + queryParams.Add("c_double", args.CDouble); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_nchar", args.CNchar); + queryParams.Add("c_national_char", args.CNationalChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_tinytext", args.CTinytext); + queryParams.Add("c_mediumtext", args.CMediumtext); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_longtext", args.CLongtext); + queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_enum", args.CEnum); + queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); + queryParams.Add("c_year", args.CYear); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_datetime", args.CDatetime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_binary", args.CBinary); + queryParams.Add("c_varbinary", args.CVarbinary); + queryParams.Add("c_tinyblob", args.CTinyblob); + queryParams.Add("c_blob", args.CBlob); + queryParams.Add("c_mediumblob", args.CMediumblob); + queryParams.Add("c_longblob", args.CLongblob); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlTypesBatchArgs + { + public byte? CBit { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? 
CDoublePrecision { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new 
Utils.ByteArrayCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, 
c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; + public class GetMysqlTypesRow + { + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public class GetMysqlTypesCntRow + { + public long Cnt { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public byte? CBit { get; set; } + public short? 
CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); + } + + private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; + public class GetMysqlFunctionsRow + { + public int? 
MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } + }; + public async Task GetMysqlFunctions() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); + } + + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncateMysqlTypesSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -221,405 +593,75 @@ public async Task> GetAuthorByNamePattern(GetAut return result.AsList(); } } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); - } - - private const string DeleteAuthorSql = 
"DELETE FROM authors WHERE name = @name "; - public class DeleteAuthorArgs - { - public string Name { get; set; } - }; - public async Task DeleteAuthor(DeleteAuthorArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); - } - - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.ExecuteAsync(DeleteAllAuthorsSql); - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); - } - - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public class UpdateAuthorsArgs - { - public string Bio { get; set; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State 
!= System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); - } - - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs - { - public long[] Ids { get; set; } - }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) - { - var transformedSql = GetAuthorsByIdsSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - var queryParams = new Dictionary(); - for (int i = 0; i < args.Ids.Length; i++) - queryParams.Add($"@idsArg{i}", args.Ids[i]); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(transformedSql, queryParams); - return result.AsList(); - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); - } - - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public long[] Ids { get; set; } - public string[] Names { get; set; } - }; - public async Task> 
GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) - { - var transformedSql = GetAuthorsByIdsAndNamesSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); - var queryParams = new Dictionary(); - for (int i = 0; i < args.Ids.Length; i++) - queryParams.Add($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - queryParams.Add($"@namesArg{i}", args.Names[i]); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(transformedSql, queryParams); - return result.AsList(); - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); - } - - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id); SELECT LAST_INSERT_ID()"; - public class CreateBookArgs - { - public string Name { get; set; } - public long AuthorId { get; set; } - }; - public async Task CreateBook(CreateBookArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, 
queryParams, transaction: this.Transaction); - } - - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; - public class ListAllAuthorsBooksRow - { - public Author Author { get; set; } - public Book Book { get; set; } - }; - public async Task> ListAllAuthorsBooks() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } - } - - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; - public class GetDuplicateAuthorsRow - { - public Author Author { get; set; } - public Author Author2 { get; set; } - }; - public async Task> GetDuplicateAuthors() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } - } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . name = @name "; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public class DeleteAuthorArgs { public string Name { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task DeleteAuthor(DeleteAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + } + + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; + await connection.ExecuteAsync(DeleteAllAuthorsSql); } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } + + await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; - public class InsertMysqlTypesArgs + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public class UpdateAuthorsArgs { - public byte? CBit { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public decimal? CDecimal { get; set; } - public decimal? 
CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CFloat { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + public async Task UpdateAuthors(UpdateAuthorsArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_bool", args.CBool); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_tinyint", args.CTinyint); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_mediumint", args.CMediumint); - queryParams.Add("c_int", args.CInt); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_dec", args.CDec); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_fixed", args.CFixed); - queryParams.Add("c_float", args.CFloat); - queryParams.Add("c_double", args.CDouble); - 
queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_nchar", args.CNchar); - queryParams.Add("c_national_char", args.CNationalChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_tinytext", args.CTinytext); - queryParams.Add("c_mediumtext", args.CMediumtext); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_longtext", args.CLongtext); - queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_enum", args.CEnum); - queryParams.Add("c_set", args.CSet != null ? string.Join(",", args.CSet) : null); - queryParams.Add("c_year", args.CYear); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_datetime", args.CDatetime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_binary", args.CBinary); - queryParams.Add("c_varbinary", args.CVarbinary); - queryParams.Add("c_tinyblob", args.CTinyblob); - queryParams.Add("c_blob", args.CBlob); - queryParams.Add("c_mediumblob", args.CMediumblob); - queryParams.Add("c_longblob", args.CLongblob); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -627,160 +669,93 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - 
public class InsertMysqlTypesBatchArgs + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; + public class GetAuthorsByIdsRow { - public byte? CBit { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlTypesBatch(List args) + public class GetAuthorsByIdsArgs { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + public long[] Ids { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + { + var transformedSql = GetAuthorsByIdsSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + var queryParams = new Dictionary(); + for (int i = 0; i < args.Ids.Length; i++) + queryParams.Add($"@idsArg{i}", args.Ids[i]); + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] - { - supportedDateTimeFormat - } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - 
csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + var result = await connection.QueryAsync(transformedSql, queryParams); + return result.AsList(); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); + } + + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; + public class GetAuthorsByIdsAndNamesRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsAndNamesArgs + { + public long[] Ids { get; set; } + public string[] Names { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + { + var transformedSql = GetAuthorsByIdsAndNamesSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + transformedSql = 
Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); + var queryParams = new Dictionary(); + for (int i = 0; i < args.Ids.Length; i++) + queryParams.Add($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + queryParams.Add($"@namesArg{i}", args.Names[i]); + if (this.Transaction == null) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) + using (var connection = new MySqlConnection(ConnectionString)) { - Local = true, - TableName = "mysql_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + var result = await connection.QueryAsync(transformedSql, queryParams); + return result.AsList(); + } } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, 
c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; - public class GetMysqlTypesRow + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id); SELECT LAST_INSERT_ID()"; + public class CreateBookArgs { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public string CTime { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public byte? 
CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public string Name { get; set; } + public long AuthorId { get; set; } }; - public async Task GetMysqlTypes() + public async Task CreateBook(CreateBookArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql); - return result; + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } } @@ -789,116 +764,141 @@ public async Task GetMysqlTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, 
c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; - public class GetMysqlTypesCntRow + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + public class ListAllAuthorsBooksRow { - public long Cnt { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public byte? CBit { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public Author Author { get; set; } + public Book Book { get; set; } }; - public async Task GetMysqlTypesCnt() + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql); - return result; + await connection.OpenAsync(); + using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public class GetMysqlFunctionsRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public class GetDuplicateAuthorsRow { - public int? MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetMysqlFunctions() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); - return result; + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + public class GetAuthorsByBookNameRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } + }; + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncateMysqlTypesSql); + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) + { + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } - - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index 3e1218db..8f368d58 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/schema.sql" + "examples/config/mysql/types/schema.sql", + "examples/config/mysql/authors/schema.sql" ], "queries": [ - "examples/config/mysql/query.sql" + "examples/config/mysql/types/query.sql", + "examples/config/mysql/authors/query.sql" ], "codegen": { "out": "examples/MySqlConnectorDapperLegacyExample", @@ -23,95 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - 
"name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "mysql_types" @@ -508,6 +421,95 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ], "enums": [ @@ -606,1872 +608,1618 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors WHERE name = ? 
LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_bit", + "length": 8, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "bit" }, - "originalName": "name" + "originalName": "c_bit" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? 
OFFSET ?", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "limit", - "notNull": true, - "length": -1, + "name": "c_bool", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_types" + }, "type": { - "name": "integer" - } + "name": "tinyint" + }, + "originalName": "c_bool" } }, { - "number": 2, + "number": 3, "column": { - "name": "offset", - "notNull": true, - "length": -1, + "name": "c_boolean", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_types" + }, "type": { - "name": "integer" - } + "name": "tinyint" + }, + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "tinyint" }, - "originalName": "id" + "originalName": "c_tinyint" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "smallint" }, - "originalName": "name" + "originalName": "c_smallint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + 
"name": "c_mediumint", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "mediumint" }, - "originalName": "bio" + "originalName": "c_mediumint" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_int", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "name" + "originalName": "c_int" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 9, "column": { - "name": "id", - "notNull": true, + "name": "c_bigint", "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { "name": "bigint" }, - "originalName": "id" + "originalName": "c_bigint" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, + "name": "c_decimal", + "length": 10, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "name" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - 
"number": 1, + "number": 11, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_dec", + "length": 10, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "name" + "originalName": "c_dec" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 12, "column": { - "name": "bio", - "length": -1, - "isNamedParam": true, + "name": "c_numeric", + "length": 10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "bio" + "originalName": "c_numeric" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 13, + "column": { + "name": "c_fixed", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 14, "column": { - "name": "ids", - "notNull": true, + "name": "c_float", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": 
"mysql_types" }, "type": { - "name": "bigint" + "name": "float" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_float" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 15, "column": { - "name": "ids", - "notNull": true, + "name": "c_double", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "double" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_double" } }, { - "number": 2, + "number": 16, "column": { - "name": "names", - "notNull": true, + "name": "c_double_precision", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "double" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_double_precision" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", - "name": "CreateBook", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 17, "column": { - "name": "name", - "notNull": true, + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "char" }, - "originalName": 
"name" + "originalName": "c_char" } }, { - "number": 2, + "number": 18, "column": { - "name": "author_id", - "notNull": true, + "name": "c_nchar", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "char" }, - "originalName": "author_id" + "originalName": "c_nchar" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 19, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } - }, - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 20, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM 
authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "parameters": [ - { - "number": 1, + "number": 21, "column": { - "name": "name", - "notNull": true, + "name": "c_tinytext", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "tinytext" }, - "originalName": "name" + "originalName": "c_tinytext" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 22, "column": { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": 
"c_mediumtext" } }, { - "number": 2, + "number": 23, "column": { - "name": "c_bool", - "length": 1, + "name": "c_text", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_bool" + "originalName": "c_text" } }, { - "number": 3, + "number": 24, "column": { - "name": "c_boolean", - "length": 1, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "longtext" }, - "originalName": "c_boolean" + "originalName": "c_longtext" } }, { - "number": 4, + "number": 25, "column": { - "name": "c_tinyint", - "length": 3, + "name": "c_json", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "json" }, - "originalName": "c_tinyint" + "originalName": "c_json" } }, { - "number": 5, + "number": 26, "column": { - "name": "c_smallint", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "smallint" + "name": "json" }, - "originalName": "c_smallint" + "originalName": "c_json_string_override" } }, { - "number": 6, + "number": 27, "column": { - "name": "c_mediumint", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumint" + "name": "mysql_types_c_enum" }, - "originalName": "c_mediumint" + "originalName": "c_enum" } }, { - "number": 7, + "number": 28, "column": { - "name": "c_int", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_set" }, - "originalName": "c_int" + "originalName": "c_set" } }, { - "number": 8, + "number": 29, "column": { - "name": "c_integer", + "name": "c_year", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "year" }, - 
"originalName": "c_integer" + "originalName": "c_year" } }, { - "number": 9, + "number": 30, "column": { - "name": "c_bigint", + "name": "c_date", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "date" }, - "originalName": "c_bigint" + "originalName": "c_date" } }, { - "number": 10, + "number": 31, "column": { - "name": "c_decimal", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "datetime" }, - "originalName": "c_decimal" + "originalName": "c_datetime" } }, { - "number": 11, + "number": 32, "column": { - "name": "c_dec", - "length": 10, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "timestamp" }, - "originalName": "c_dec" + "originalName": "c_timestamp" } }, { - "number": 12, + "number": 33, "column": { - "name": "c_numeric", - "length": 10, + "name": "c_binary", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "binary" }, - "originalName": "c_numeric" + "originalName": "c_binary" } }, { - "number": 13, + "number": 34, "column": { - "name": "c_fixed", + "name": "c_varbinary", "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "varbinary" }, - "originalName": "c_fixed" + "originalName": "c_varbinary" } }, { - "number": 14, + "number": 35, "column": { - "name": "c_float", + "name": "c_tinyblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "float" + "name": "tinyblob" }, - "originalName": "c_float" + "originalName": "c_tinyblob" } }, { - "number": 15, + "number": 36, "column": { - "name": "c_double", + "name": "c_blob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "blob" }, - 
"originalName": "c_double" + "originalName": "c_blob" } }, { - "number": 16, + "number": 37, "column": { - "name": "c_double_precision", + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "mediumblob" }, - "originalName": "c_double_precision" + "originalName": "c_mediumblob" } }, { - "number": 17, + "number": 38, "column": { - "name": "c_char", + "name": "c_longblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "longblob" }, - "originalName": "c_char" + "originalName": "c_longblob" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_types" + } + }, + { + "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 18, + "number": 1, "column": { - "name": "c_nchar", - "length": -1, + "name": "c_bit", + "length": 8, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "bit" }, - "originalName": "c_nchar" + "originalName": "c_bit" } }, { - "number": 19, + "number": 2, "column": { - "name": "c_national_char", - "length": -1, + "name": "c_bool", + "length": 1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_bool" } }, { - "number": 20, + 
"number": 3, "column": { - "name": "c_varchar", - "length": 100, + "name": "c_boolean", + "length": 1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "tinyint" }, - "originalName": "c_varchar" + "originalName": "c_boolean" } }, { - "number": 21, + "number": 4, "column": { - "name": "c_tinytext", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "tinyint" }, - "originalName": "c_tinytext" + "originalName": "c_tinyint" } }, { - "number": 22, + "number": 5, "column": { - "name": "c_mediumtext", + "name": "c_smallint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "smallint" }, - "originalName": "c_mediumtext" + "originalName": "c_smallint" } }, { - "number": 23, + "number": 6, "column": { - "name": "c_text", + "name": "c_mediumint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "text" + "name": "mediumint" }, - "originalName": "c_text" + "originalName": "c_mediumint" } }, { - "number": 24, + "number": 7, "column": { - "name": "c_longtext", + "name": "c_int", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "longtext" + "name": "int" }, - "originalName": "c_longtext" + "originalName": "c_int" } }, { - "number": 25, + "number": 8, "column": { - "name": "c_json", + "name": "c_integer", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "json" + "name": "int" }, - "originalName": "c_json" + "originalName": "c_integer" } }, { - "number": 26, + "number": 9, "column": { - "name": "c_json_string_override", + "name": "c_bigint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "json" + "name": "bigint" }, - "originalName": "c_json_string_override" + "originalName": "c_bigint" } }, { - "number": 27, + 
"number": 10, "column": { - "name": "c_enum", - "length": 6, + "name": "c_float", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "float" }, - "originalName": "c_enum" + "originalName": "c_float" } }, { - "number": 28, + "number": 11, "column": { - "name": "c_set", - "length": 15, + "name": "c_numeric", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" + "originalName": "c_numeric" } }, { - "number": 29, + "number": 12, "column": { - "name": "c_year", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "year" + "name": "decimal" }, - "originalName": "c_year" + "originalName": "c_decimal" } }, { - "number": 30, + "number": 13, "column": { - "name": "c_date", - "length": -1, + "name": "c_dec", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_dec" } }, { - "number": 31, + "number": 14, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_fixed", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_fixed" } }, { - "number": 32, + "number": 15, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_double", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "double" }, - "originalName": "c_timestamp" + "originalName": "c_double" } }, { - "number": 33, + "number": 16, "column": { - "name": "c_binary", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "binary" + "name": "double" }, - "originalName": 
"c_binary" + "originalName": "c_double_precision" } }, { - "number": 34, + "number": 17, "column": { - "name": "c_varbinary", - "length": 10, + "name": "c_char", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_char" } }, { - "number": 35, + "number": 18, "column": { - "name": "c_tinyblob", + "name": "c_nchar", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyblob" + "name": "char" }, - "originalName": "c_tinyblob" + "originalName": "c_nchar" } }, { - "number": 36, + "number": 19, "column": { - "name": "c_blob", + "name": "c_national_char", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "blob" + "name": "char" }, - "originalName": "c_blob" + "originalName": "c_national_char" } }, { - "number": 37, + "number": 20, "column": { - "name": "c_mediumblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumblob" + "name": "varchar" }, - "originalName": "c_mediumblob" + "originalName": "c_varchar" } }, { - "number": 38, + "number": 21, "column": { - "name": "c_longblob", + "name": "c_tinytext", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "longblob" + "name": "tinytext" }, - "originalName": "c_longblob" + "originalName": "c_tinytext" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, 
c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 22, "column": { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" } }, { - "number": 2, + "number": 23, "column": { - "name": "c_bool", - "length": 1, + "name": "c_text", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_bool" + "originalName": "c_text" } }, { - "number": 3, + "number": 24, "column": { - "name": "c_boolean", - "length": 1, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - } - }, - { - "number": 4, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyint" + "name": "longtext" }, - "originalName": "c_tinyint" + "originalName": "c_longtext" } }, { - "number": 5, + "number": 25, "column": { - "name": "c_smallint", + "name": "c_json", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "smallint" + "name": "json" }, - "originalName": "c_smallint" + "originalName": "c_json" } }, { - "number": 6, + "number": 26, "column": { - "name": "c_mediumint", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumint" + "name": "json" }, - "originalName": "c_mediumint" + "originalName": "c_json_string_override" } }, { - "number": 7, + "number": 27, "column": { - "name": "c_int", - 
"length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_enum" }, - "originalName": "c_int" + "originalName": "c_enum" } }, { - "number": 8, + "number": 28, "column": { - "name": "c_integer", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_set" }, - "originalName": "c_integer" + "originalName": "c_set" } }, { - "number": 9, + "number": 29, "column": { - "name": "c_bigint", + "name": "c_year", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "year" }, - "originalName": "c_bigint" + "originalName": "c_year" } }, { - "number": 10, + "number": 30, "column": { - "name": "c_float", + "name": "c_date", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "float" + "name": "date" }, - "originalName": "c_float" + "originalName": "c_date" } }, { - "number": 11, + "number": 31, "column": { - "name": "c_numeric", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "datetime" }, - "originalName": "c_numeric" + "originalName": "c_datetime" } }, { - "number": 12, + "number": 32, "column": { - "name": "c_decimal", - "length": 10, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "timestamp" }, - "originalName": "c_decimal" + "originalName": "c_timestamp" } }, { - "number": 13, + "number": 33, "column": { - "name": "c_dec", - "length": 10, + "name": "c_binary", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "binary" }, - "originalName": "c_dec" + "originalName": "c_binary" } }, { - "number": 14, + "number": 34, "column": { - "name": 
"c_fixed", + "name": "c_varbinary", "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "varbinary" }, - "originalName": "c_fixed" + "originalName": "c_varbinary" } }, { - "number": 15, + "number": 35, "column": { - "name": "c_double", + "name": "c_tinyblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "tinyblob" }, - "originalName": "c_double" + "originalName": "c_tinyblob" } }, { - "number": 16, + "number": 36, "column": { - "name": "c_double_precision", + "name": "c_blob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "blob" }, - "originalName": "c_double_precision" + "originalName": "c_blob" } }, { - "number": 17, + "number": 37, "column": { - "name": "c_char", + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumblob" }, - "originalName": "c_char" + "originalName": "c_mediumblob" } }, { - "number": 18, + "number": 38, "column": { - "name": "c_nchar", + "name": "c_longblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "longblob" }, - "originalName": "c_nchar" + "originalName": "c_longblob" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "name": "GetMysqlTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + 
"length": 1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 19, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 20, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" }, { - "number": 21, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 22, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" }, { - "number": 23, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } + "name": "c_int", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" }, { - "number": 24, - "column": { - "name": "c_longtext", - "length": -1, - "table": { 
- "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } + "name": "c_integer", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" }, { - "number": 25, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } + "name": "c_bigint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" }, { - "number": 26, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } + "name": "c_float", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" }, { - "number": 27, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } + "name": "c_decimal", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_decimal" }, { - "number": 28, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } + "name": "c_dec", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" }, { - "number": 29, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } + "name": "c_numeric", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + 
"name": "decimal" + }, + "originalName": "c_numeric" }, { - "number": 30, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } + "name": "c_fixed", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" }, { - "number": 31, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" }, { - "number": 32, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" }, { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } + "name": "c_year", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" }, { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" }, { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": 
"tinyblob" - }, - "originalName": "c_tinyblob" - } + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" }, { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" }, { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_char", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + }, + { + "name": "c_nchar", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + }, + { + "name": "c_national_char", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + }, + { + "name": "c_varchar", + "length": 100, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_tinytext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + }, + { + "name": "c_mediumtext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "text" + }, + "originalName": 
"c_text" + }, + { + "name": "c_longtext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + }, + { + "name": "c_json_string_override", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + }, + { + "name": "c_enum", + "length": 6, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mysql_types_c_enum" + }, + "originalName": "c_enum" + }, + { + "name": "c_set", + "length": 15, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mysql_types_c_set" + }, + "originalName": "c_set" + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" }, { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": 
-1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } + "filename": "query.sql" }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", + "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, { "name": "c_bool", "length": 1, @@ -2494,6 +2242,17 @@ }, "originalName": "c_boolean" }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_types" + }, + 
"type": { + "name": "bit" + }, + "originalName": "c_bit" + }, { "name": "c_tinyint", "length": 3, @@ -2572,7 +2331,7 @@ "originalName": "c_float" }, { - "name": "c_decimal", + "name": "c_numeric", "length": 10, "table": { "name": "mysql_types" @@ -2580,10 +2339,10 @@ "type": { "name": "decimal" }, - "originalName": "c_decimal" + "originalName": "c_numeric" }, { - "name": "c_dec", + "name": "c_decimal", "length": 10, "table": { "name": "mysql_types" @@ -2591,10 +2350,10 @@ "type": { "name": "decimal" }, - "originalName": "c_dec" + "originalName": "c_decimal" }, { - "name": "c_numeric", + "name": "c_dec", "length": 10, "table": { "name": "mysql_types" @@ -2602,7 +2361,7 @@ "type": { "name": "decimal" }, - "originalName": "c_numeric" + "originalName": "c_dec" }, { "name": "c_fixed", @@ -2637,61 +2396,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -2825,15 +2529,48 @@ "originalName": "c_set" }, { - "name": "c_bit", - "length": 8, + "name": "c_year", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "bit" + "name": "year" }, - "originalName": "c_bit" + "originalName": "c_year" + }, + { + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_types" + 
}, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" }, { "name": "c_binary", @@ -2905,479 +2642,744 @@ "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", - "name": "GetMysqlTypesCnt", + "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", + "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ { - "name": "cnt", + "name": "max_int", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "any" } }, { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - 
"name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" + "name": "any" + } }, { - "name": "c_smallint", + "name": "max_timestamp", + "notNull": true, "length": -1, - "table": { - "name": "mysql_types" - }, + "isFuncCall": true, "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, + "name": "any" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "name": "c_mediumint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mediumint" + "name": "bigint" }, - "originalName": "c_mediumint" + "originalName": "id" }, { - "name": "c_int", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "name" }, { - "name": "c_integer", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_integer" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_bigint", + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio 
\nFROM authors\nORDER BY name\nLIMIT ? OFFSET ?", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_float", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "float" + "name": "text" }, - "originalName": "c_float" + "originalName": "name" }, { - "name": "c_numeric", - "length": 10, + "name": "bio", + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "name": "limit", + "notNull": true, + "length": -1, + "type": { + "name": "integer" + } + } }, { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - }, + "number": 2, + "column": { + "name": "offset", + "notNull": true, + "length": -1, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", + "parameters": [ { - "name": "c_fixed", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_fixed" + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + } }, { - "name": "c_double", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "double" - }, - 
"originalName": "c_double" + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double_precision" - }, + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "parameters": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_nchar", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "char" + "name": "bigint" }, - "originalName": "c_nchar" + "originalName": "id" }, { - "name": "c_national_char", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "varchar" + "name": "text" }, - "originalName": "c_varchar" + "originalName": "name" }, { - "name": "c_tinytext", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "tinytext" + "name": "text" }, - "originalName": "c_tinytext" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_mediumtext", + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mediumtext" + "name": "bigint" }, - "originalName": "c_mediumtext" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { "name": "text" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_longtext", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "longtext" + "name": "text" }, - "originalName": "c_longtext" - }, + 
"originalName": "bio" + } + ], + "parameters": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - }, + "number": 1, + "column": { + "name": "bio", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "name": "c_set", - "length": 15, + "name": "id", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mysql_types_c_set" + "name": "bigint" }, - "originalName": "c_set" + "originalName": 
"id" }, { - "name": "c_year", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "year" + "name": "text" }, - "originalName": "c_year" + "originalName": "name" }, { - "name": "c_date", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "date" + "name": "text" }, - "originalName": "c_date" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_datetime", - "length": 19, + "number": 1, + "column": { + "name": "ids", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "isSqlcSlice": true, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "datetime" + "name": "bigint" }, - "originalName": "c_datetime" + "originalName": "id" }, { - "name": "c_timestamp", - "length": 19, + "name": "name", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "timestamp" + "name": "text" }, - "originalName": "c_timestamp" + "originalName": "name" }, { - "name": "c_binary", - "length": 3, + "name": "bio", + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "isSqlcSlice": true, + "originalName": "id" + } }, { - "name": "c_varbinary", - "length": 
10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" + "number": 2, + "column": { + "name": "names", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "isSqlcSlice": true, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", + "name": "CreateBook", + "cmd": ":execlastid", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_tinyblob", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "bigint" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "books", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id 
\u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_mediumblob", + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_longblob", + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", - "cmd": ":one", + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "max_int", + "name": "id", "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "bigint" + }, + "originalName": "id" }, { - "name": "max_varchar", + "name": "name", "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "text" + }, + "originalName": "name" }, { - "name": "max_timestamp", - "notNull": true, + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" } } ], - "filename": "query.sql" - 
}, - { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", - "cmd": ":exec", "filename": "query.sql" }, { diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index 2ab67f13acd7fd315999caa5e38a62dc539ec4fd..d2a8fde344a95c745f8fb65f7ecd1e0750df4567 100644 GIT binary patch delta 134 zcmeykjPb&9Mt-hlHZDdZA=cc=;=&v$wTXseLM4?2sm1!m$r-7+iFzPW4WOt`k}6PK5Td%UG_|M_q*zHED98>~ywRc#4xVQgajafP5u+Ab;bsa;D9CTt? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); -public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? 
AuthorType); public enum MysqlTypesCEnum { diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index a15daeb9..8c4f615c 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -654,6 +654,135 @@ public async Task> GetAuthorsByBookName(GetAuthors } } + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; + public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(CreateExtendedBioSql, connection)) + { + command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateExtendedBioSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); + public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBiosBioType? BioType); + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetFirstExtendedBioByTypeSql, connection)) + { + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), + Name = reader.IsDBNull(1) ? null : reader.GetString(1), + BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), + AuthorType = reader.IsDBNull(3) ? 
null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetFirstExtendedBioByTypeSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), + Name = reader.IsDBNull(1) ? null : reader.GetString(1), + BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + }; + } + } + } + + return null; + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateExtendedBiosSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateExtendedBiosSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + private const string InsertMysqlTypesSql = "INSERT INTO mysql_types 
(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; public readonly record struct InsertMysqlTypesArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) @@ -1161,133 +1290,4 @@ public async Task TruncateMysqlTypes() await command.ExecuteNonQueryAsync(); } } - - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); - public async Task CreateExtendedBio(CreateExtendedBioArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateExtendedBioSql, connection)) - { - command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateExtendedBioSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? 
string.Join(",", args.AuthorType) : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); - public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBiosBioType? BioType); - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetFirstExtendedBioByTypeSql, connection)) - { - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetFirstExtendedBioByTypeRow - { - AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), - Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetFirstExtendedBioByTypeSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio_type", args.BioType ?? 
(object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetFirstExtendedBioByTypeRow - { - AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), - Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() - }; - } - } - } - - return null; - } - - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateExtendedBiosSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncateExtendedBiosSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index 8097883f..555922e7 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/schema.sql" + "examples/config/mysql/types/schema.sql", + "examples/config/mysql/authors/schema.sql" ], "queries": [ - "examples/config/mysql/query.sql" + "examples/config/mysql/authors/query.sql", + "examples/config/mysql/types/query.sql" ], "codegen": { "out": 
"examples/MySqlConnectorExample", @@ -23,95 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "mysql_types" @@ -508,6 +421,95 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + 
"table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ], "enums": [ @@ -1305,6 +1307,157 @@ ], "filename": "query.sql" }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } + }, + { + "number": 4, + "column": { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? 
LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + }, + { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, { "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", @@ -3379,157 +3532,6 @@ "name": "TruncateMysqlTypes", "cmd": ":exec", "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, 
?, ?, ?)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" - } - }, - { - "number": 2, - "column": { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - } - }, - { - "number": 3, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } - }, - { - "number": 4, - "column": { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_author_type" - }, - "originalName": "author_type" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "schema": "extended", - "name": "bios" - } - }, - { - "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? 
LIMIT 1", - "name": "GetFirstExtendedBioByType", - "cmd": ":one", - "columns": [ - { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" - }, - { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - }, - { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - }, - { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_author_type" - }, - "originalName": "author_type" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", - "cmd": ":exec", - "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorExample/request.message b/examples/MySqlConnectorExample/request.message index 603ba7f50bcdbf4450a1490b418a7e87413be251..ff012496f949702dbeff93b4626b2045e91b7c57 100644 GIT binary patch delta 134 zcmdnLjB(y_Mt-g*tXzynLae!!#f3RiY7-5`gi0z4Qj7JAlQU9t6ZJr%8bDEz#L|+C z{32v=C3T>aTF&Q&%e#CWNU~`ji8}sH3 N(iaUka|D_*0RYQUE<^wT delta 75 zcmbQYoN@m$Mt-hVRxU;(A=cc=;=&v$g^7t`JjKZwskw=IK)#YZkiT(R1k+}IE+fXx df0+c?H#>5l7uY;q{(|9VewlXW&FO*WOaOdO7%l(+ diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index 26c2c7c6..fc7ef595 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -6,19 +6,6 @@ namespace MySqlConnectorLegacyExampleGen using 
System.Linq; using System.Text.Json; - public class Author - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class Book - { - public long Id { get; set; } - public string Name { get; set; } - public long AuthorId { get; set; } - public string Description { get; set; } - }; public class MysqlType { public bool? CBool { get; set; } @@ -61,6 +48,19 @@ public class MysqlType public byte[] CMediumblob { get; set; } public byte[] CLongblob { get; set; } }; + public class Author + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class Book + { + public long Id { get; set; } + public string Name { get; set; } + public long AuthorId { get; set; } + public string Description { get; set; } + }; public class ExtendedBio { public string AuthorName { get; set; } diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 89e6aa2c..0aed6423 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -42,43 +42,100 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorArgs + private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, 
c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + public class InsertMysqlTypesArgs { - public string Name { get; set; } + public byte? CBit { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CFloat { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorSql, connection)) + using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -88,189 +145,247 @@ public async Task GetAuthor(GetAuthorArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; + command.CommandText = InsertMysqlTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs + public class InsertMysqlTypesBatchArgs { - public int Limit { get; set; } - public int Offset { get; set; } + public byte? CBit { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? 
CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> ListAuthors(ListAuthorsArgs args) + public async Task InsertMysqlTypesBatch(List args) { - if (this.Transaction == null) + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) { - using (var connection = new MySqlConnection(ConnectionString)) + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions { - await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAuthorsSql, connection)) + Formats = new[] { - command.Parameters.AddWithValue("@limit", args.Limit); - command.Parameters.AddWithValue("@offset", args.Offset); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + supportedDateTimeFormat } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAuthorsSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@limit", args.Limit); - command.Parameters.AddWithValue("@offset", args.Offset); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } - } - } - - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; - public class CreateAuthorArgs - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public async Task CreateAuthor(CreateAuthorArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateAuthorSql, connection)) - { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateAuthorSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await 
csvWriter.WriteRecordsAsync(args); } - } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio)"; - public class CreateAuthorReturnIdArgs - { - public string Name { get; set; } - public string Bio { get; set; } - }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) - { - if (this.Transaction == null) + using (var connection = new MySqlConnection(ConnectionString)) { - using (var connection = new MySqlConnection(ConnectionString)) + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) { - await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateAuthorReturnIdSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateAuthorReturnIdSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; + Local = true, + TableName = "mysql_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; + public class GetMysqlTypesRow { - public long Id { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? 
CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetMysqlTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorByIdSql, connection)) + using (var command = new MySqlCommand(GetMysqlTypesSql, connection)) { - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetMysqlTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), + CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), + CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), + CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CYear = reader.IsDBNull(15) ? (short? )null : reader.GetInt16(15), + CDate = reader.IsDBNull(16) ? (DateTime? )null : reader.GetDateTime(16), + CTime = reader.IsDBNull(17) ? null : reader.GetString(17), + CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), + CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), + CChar = reader.IsDBNull(20) ? null : reader.GetString(20), + CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), + CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), + CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), + CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), + CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), + CText = reader.IsDBNull(26) ? null : reader.GetString(26), + CLongtext = reader.IsDBNull(27) ? 
null : reader.GetString(27), + CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), + CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), + CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), + CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), + CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), + CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), + CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), + CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), + CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), + CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) }; } } @@ -287,95 +402,165 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = GetMysqlTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) - }; - } - } - } - - return null; - } - - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs - { - public string NamePattern { get; set; } - }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorByNamePatternSql, connection)) - { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) + return new GetMysqlTypesRow { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } - } + CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), + CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), + CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CNumeric = reader.IsDBNull(11) ? (decimal? 
)null : reader.GetDecimal(11), + CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), + CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CYear = reader.IsDBNull(15) ? (short? )null : reader.GetInt16(15), + CDate = reader.IsDBNull(16) ? (DateTime? )null : reader.GetDateTime(16), + CTime = reader.IsDBNull(17) ? null : reader.GetString(17), + CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), + CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), + CChar = reader.IsDBNull(20) ? null : reader.GetString(20), + CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), + CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), + CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), + CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), + CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), + CText = reader.IsDBNull(26) ? null : reader.GetString(26), + CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), + CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), + CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), + CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), + CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), + CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), + CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), + CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), + CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), + CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), + CLongblob = reader.IsDBNull(38) ? 
null : reader.GetFieldValue(38) + }; + } } } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorByNamePatternSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } - } + return null; } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; - public class DeleteAuthorArgs + private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public class GetMysqlTypesCntRow { - public string Name { get; set; } + public long Cnt { get; set; } + 
public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public byte? CBit { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task GetMysqlTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(DeleteAuthorSql, connection)) + using (var command = new MySqlCommand(GetMysqlTypesCntSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlTypesCntRow + { + Cnt = reader.GetInt64(0), + CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), + CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), + CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CSmallint = reader.IsDBNull(5) ? (short? )null : reader.GetInt16(5), + CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), + CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), + CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), + CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), + CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), + CDoublePrecision = reader.IsDBNull(16) ? 
(double? )null : reader.GetDouble(16), + CChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), + CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), + CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), + CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CText = reader.IsDBNull(23) ? null : reader.GetString(23), + CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), + CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), + CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), + CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), + CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), + CTimestamp = reader.IsDBNull(32) ? (DateTime? )null : reader.GetDateTime(32), + CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), + CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), + CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), + CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), + CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), + CLongblob = reader.IsDBNull(38) ? 
null : reader.GetFieldValue(38) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -385,28 +570,93 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = GetMysqlTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlTypesCntRow + { + Cnt = reader.GetInt64(0), + CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), + CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), + CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CSmallint = reader.IsDBNull(5) ? (short? )null : reader.GetInt16(5), + CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), + CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), + CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), + CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), + CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), + CDoublePrecision = reader.IsDBNull(16) ? (double? )null : reader.GetDouble(16), + CChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNchar = reader.IsDBNull(18) ? 
null : reader.GetString(18), + CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), + CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), + CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), + CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CText = reader.IsDBNull(23) ? null : reader.GetString(23), + CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), + CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), + CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), + CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), + CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), + CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), + CTimestamp = reader.IsDBNull(32) ? (DateTime? )null : reader.GetDateTime(32), + CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), + CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), + CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), + CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), + CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), + CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + }; + } + } } + + return null; } - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() + private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; + public class GetMysqlFunctionsRow + { + public int? 
MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } + }; + public async Task GetMysqlFunctions() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(DeleteAllAuthorsSql, connection)) + using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -416,30 +666,40 @@ public async Task DeleteAllAuthors() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAllAuthorsSql; + command.CommandText = GetMysqlFunctionsSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } + + return null; } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public class UpdateAuthorsArgs - { - public string Bio { get; set; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(UpdateAuthorsSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) { - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -449,99 +709,106 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = UpdateAuthorsSql; + command.CommandText = TruncateMysqlTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; - public class GetAuthorsByIdsRow + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; + public class GetAuthorRow { public long Id { get; set; } public string Name { get; set; } public string Bio { get; set; } }; - public class GetAuthorsByIdsArgs + public class GetAuthorArgs { - public long[] Ids { get; set; } + public string Name { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetAuthor(GetAuthorArgs args) { - var transformedSql = GetAuthorsByIdsSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(transformedSql, connection)) + using (var command = new MySqlCommand(GetAuthorSql, connection)) { - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = transformedSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + }; + } } } + + return null; } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; - public class GetAuthorsByIdsAndNamesRow + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + public class ListAuthorsRow { public long Id { get; set; } public string Name { get; set; } public string Bio { get; set; } }; - public class GetAuthorsByIdsAndNamesArgs + public class ListAuthorsArgs { - public long[] Ids { get; set; } - public string[] Names { get; set; } + public int Limit { get; set; } + public int Offset { get; set; } }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + public async Task> ListAuthors(ListAuthorsArgs args) { - var transformedSql = GetAuthorsByIdsAndNamesSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(transformedSql, connection)) + using (var command = new MySqlCommand(ListAuthorsSql, connection)) { - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + command.Parameters.AddWithValue("@limit", args.Limit); + command.Parameters.AddWithValue("@offset", args.Offset); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); return result; } } @@ -552,43 +819,44 @@ public async Task> GetAuthorsByIdsAndNames(GetA throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = transformedSql; + command.CommandText = ListAuthorsSql; command.Transaction = this.Transaction; - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + command.Parameters.AddWithValue("@limit", args.Limit); + command.Parameters.AddWithValue("@offset", args.Offset); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); return result; } } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id)"; - public class CreateBookArgs + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; + public class CreateAuthorArgs { + public long Id { get; set; } public string Name { get; set; } - public long AuthorId { get; set; } + public string Bio { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateBookSql, connection)) + using (var command = new MySqlCommand(CreateAuthorSql, connection)) { + command.Parameters.AddWithValue("@id", args.Id); command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -598,126 +866,146 @@ public async Task CreateBook(CreateBookArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; - public class ListAllAuthorsBooksRow + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio)"; + public class CreateAuthorReturnIdArgs { - public Author Author { get; set; } - public Book Book { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) + using (var command = new MySqlCommand(CreateAuthorReturnIdSql, connection)) { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAllAuthorsBooksSql; + command.CommandText = CreateAuthorReturnIdSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; - public class GetDuplicateAuthorsRow + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task> GetDuplicateAuthors() + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) + using (var command = new MySqlCommand(GetAuthorByIdSql, connection)) { + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetAuthorByIdRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetAuthorByIdRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; - public class GetAuthorsByBookNameRow + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + public class GetAuthorByNamePatternRow { public long Id { get; set; } public string Name { get; set; } public string Bio { get; set; } - public Book Book { get; set; } }; - public class GetAuthorsByBookNameArgs + public class GetAuthorByNamePatternArgs { - public string Name { get; set; } + public string NamePattern { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) + using (var command = new MySqlCommand(GetAuthorByNamePatternSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); return result; } } @@ -728,108 +1016,34 @@ public async Task> GetAuthorsByBookName(GetAuthors throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = GetAuthorByNamePatternSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); return result; } } } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; - public class InsertMysqlTypesArgs + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public class DeleteAuthorArgs { - public byte? CBit { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CFloat { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public string Name { get; set; } }; - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) + using (var command = new MySqlCommand(DeleteAuthorSql, connection)) { - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_int", args.CInt ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -844,254 +1058,61 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlTypesSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - public class InsertMysqlTypesBatchArgs - { - public byte? CBit { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? 
CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } - }; - public async Task InsertMysqlTypesBatch(List args) + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] + await connection.OpenAsync(); + using (var command = new MySqlCommand(DeleteAllAuthorsSql, connection)) { - supportedDateTimeFormat + await command.ExecuteNonQueryAsync(); } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - 
csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + } + + return; } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) - { - Local = true, - TableName = "mysql_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAllAuthorsSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, 
c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; - public class GetMysqlTypesRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public class UpdateAuthorsArgs { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public string CTime { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public byte? 
CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlTypes() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesSql, connection)) + using (var command = new MySqlCommand(UpdateAuthorsSql, connection)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetMysqlTypesRow - { - CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), - CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), - CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? (short? )null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? (DateTime? 
)null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) - }; - } - } + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1101,165 +1122,146 @@ public async Task GetMysqlTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesSql; + command.CommandText = UpdateAuthorsSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); + } + } + + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; + public class GetAuthorsByIdsRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsArgs + { + public long[] Ids { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + { + var transformedSql = GetAuthorsByIdsSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) { - if (await reader.ReadAsync()) + await connection.OpenAsync(); + using (var command = new MySqlCommand(transformedSql, connection)) { - return new GetMysqlTypesRow + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + using (var reader = await command.ExecuteReaderAsync()) { - CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CMediumint = reader.IsDBNull(4) ? (int? 
)null : reader.GetInt32(4), - CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), - CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? (short? )null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? (DateTime? )null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? 
null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) - }; + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = transformedSql; + command.Transaction = this.Transaction; + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } + } } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; - public class GetMysqlTypesCntRow + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; + public class GetAuthorsByIdsAndNamesRow { - public long Cnt { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public byte? CBit { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsAndNamesArgs + { + public long[] Ids { get; set; } + public string[] Names { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + { + var transformedSql = GetAuthorsByIdsAndNamesSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while 
(await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = transformedSql; + command.Transaction = this.Transaction; + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id)"; + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } }; - public async Task GetMysqlTypesCnt() + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesCntSql, connection)) + using (var command = new MySqlCommand(CreateBookSql, connection)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetMysqlTypesCntRow - { - Cnt = reader.GetInt64(0), - CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CBoolean = reader.IsDBNull(2) ? (bool? 
)null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? (short? )null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? (double? )null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? (short? 
)null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? (DateTime? )null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) - }; - } - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1269,148 +1271,146 @@ public async Task GetMysqlTypesCnt() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesCntSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; + } + } + + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; + public class ListAllAuthorsBooksRow + { + public Author Author { get; set; } + public Book Book { get; set; } + }; + public async Task> ListAllAuthorsBooks() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) { - if (await reader.ReadAsync()) + await connection.OpenAsync(); + using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) { - return new GetMysqlTypesCntRow + using (var reader = await command.ExecuteReaderAsync()) { - Cnt = reader.GetInt64(0), - CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? (short? )null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? (double? )null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? 
null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? (DateTime? )null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) - }; + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public class GetMysqlFunctionsRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public class GetDuplicateAuthorsRow { - public int? 
MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetMysqlFunctions() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlFunctionsRow - { - MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlFunctionsSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlFunctionsRow - { - MaxInt = reader.IsDBNull(0) ? (int? 
)null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + public class GetAuthorsByBookNameRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } + }; + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlTypesSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index 7c97aba4..8732070b 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/schema.sql" + "examples/config/mysql/types/schema.sql", + "examples/config/mysql/authors/schema.sql" ], "queries": [ - "examples/config/mysql/query.sql" + "examples/config/mysql/types/query.sql", + "examples/config/mysql/authors/query.sql" ], "codegen": { "out": "examples/MySqlConnectorLegacyExample", @@ -23,95 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "mysql_types" @@ -508,6 +421,95 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + 
"type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ], "enums": [ @@ -606,1872 +608,1618 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, 
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_bit", + "length": 8, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "bit" }, - "originalName": "name" + "originalName": "c_bit" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? OFFSET ?", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "limit", - "notNull": true, - "length": -1, + "name": "c_bool", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_types" + }, "type": { - "name": "integer" - } + "name": "tinyint" + }, + "originalName": "c_bool" } }, { - "number": 2, + "number": 3, "column": { - "name": "offset", - "notNull": true, - "length": -1, + "name": "c_boolean", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_types" + }, "type": { - "name": "integer" - } + "name": "tinyint" + }, + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_tinyint", + 
"length": 3, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "tinyint" }, - "originalName": "id" + "originalName": "c_tinyint" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "smallint" }, - "originalName": "name" + "originalName": "c_smallint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + "name": "c_mediumint", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "mediumint" }, - "originalName": "bio" + "originalName": "c_mediumint" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_int", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "name" + "originalName": "c_int" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 9, "column": { - "name": "id", - "notNull": true, + "name": "c_bigint", "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { "name": "bigint" }, - "originalName": "id" + "originalName": "c_bigint" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, + "name": "c_decimal", + "length": 10, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "name" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - 
"number": 1, + "number": 11, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_dec", + "length": 10, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "name" + "originalName": "c_dec" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 12, "column": { - "name": "bio", - "length": -1, - "isNamedParam": true, + "name": "c_numeric", + "length": 10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "decimal" }, - "originalName": "bio" + "originalName": "c_numeric" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 13, + "column": { + "name": "c_fixed", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 14, "column": { - "name": "ids", - "notNull": true, + "name": "c_float", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": 
"mysql_types" }, "type": { - "name": "bigint" + "name": "float" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_float" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 15, "column": { - "name": "ids", - "notNull": true, + "name": "c_double", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "double" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_double" } }, { - "number": 2, + "number": 16, "column": { - "name": "names", - "notNull": true, + "name": "c_double_precision", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "double" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_double_precision" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", - "name": "CreateBook", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 17, "column": { - "name": "name", - "notNull": true, + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_types" }, "type": { - "name": "text" + "name": "char" }, - "originalName": 
"name" + "originalName": "c_char" } }, { - "number": 2, + "number": 18, "column": { - "name": "author_id", - "notNull": true, + "name": "c_nchar", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "char" }, - "originalName": "author_id" + "originalName": "c_nchar" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 19, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } - }, - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 20, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM 
authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "parameters": [ - { - "number": 1, + "number": 21, "column": { - "name": "name", - "notNull": true, + "name": "c_tinytext", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "mysql_types" }, "type": { - "name": "text" + "name": "tinytext" }, - "originalName": "name" + "originalName": "c_tinytext" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 22, "column": { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": 
"c_mediumtext" } }, { - "number": 2, + "number": 23, "column": { - "name": "c_bool", - "length": 1, + "name": "c_text", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_bool" + "originalName": "c_text" } }, { - "number": 3, + "number": 24, "column": { - "name": "c_boolean", - "length": 1, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "longtext" }, - "originalName": "c_boolean" + "originalName": "c_longtext" } }, { - "number": 4, + "number": 25, "column": { - "name": "c_tinyint", - "length": 3, + "name": "c_json", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "json" }, - "originalName": "c_tinyint" + "originalName": "c_json" } }, { - "number": 5, + "number": 26, "column": { - "name": "c_smallint", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "smallint" + "name": "json" }, - "originalName": "c_smallint" + "originalName": "c_json_string_override" } }, { - "number": 6, + "number": 27, "column": { - "name": "c_mediumint", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumint" + "name": "mysql_types_c_enum" }, - "originalName": "c_mediumint" + "originalName": "c_enum" } }, { - "number": 7, + "number": 28, "column": { - "name": "c_int", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_set" }, - "originalName": "c_int" + "originalName": "c_set" } }, { - "number": 8, + "number": 29, "column": { - "name": "c_integer", + "name": "c_year", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "year" }, - 
"originalName": "c_integer" + "originalName": "c_year" } }, { - "number": 9, + "number": 30, "column": { - "name": "c_bigint", + "name": "c_date", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "date" }, - "originalName": "c_bigint" + "originalName": "c_date" } }, { - "number": 10, + "number": 31, "column": { - "name": "c_decimal", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "datetime" }, - "originalName": "c_decimal" + "originalName": "c_datetime" } }, { - "number": 11, + "number": 32, "column": { - "name": "c_dec", - "length": 10, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "timestamp" }, - "originalName": "c_dec" + "originalName": "c_timestamp" } }, { - "number": 12, + "number": 33, "column": { - "name": "c_numeric", - "length": 10, + "name": "c_binary", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "binary" }, - "originalName": "c_numeric" + "originalName": "c_binary" } }, { - "number": 13, + "number": 34, "column": { - "name": "c_fixed", + "name": "c_varbinary", "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "varbinary" }, - "originalName": "c_fixed" + "originalName": "c_varbinary" } }, { - "number": 14, + "number": 35, "column": { - "name": "c_float", + "name": "c_tinyblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "float" + "name": "tinyblob" }, - "originalName": "c_float" + "originalName": "c_tinyblob" } }, { - "number": 15, + "number": 36, "column": { - "name": "c_double", + "name": "c_blob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "blob" }, - 
"originalName": "c_double" + "originalName": "c_blob" } }, { - "number": 16, + "number": 37, "column": { - "name": "c_double_precision", + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "mediumblob" }, - "originalName": "c_double_precision" + "originalName": "c_mediumblob" } }, { - "number": 17, + "number": 38, "column": { - "name": "c_char", + "name": "c_longblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "longblob" }, - "originalName": "c_char" + "originalName": "c_longblob" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_types" + } + }, + { + "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 18, + "number": 1, "column": { - "name": "c_nchar", - "length": -1, + "name": "c_bit", + "length": 8, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "bit" }, - "originalName": "c_nchar" + "originalName": "c_bit" } }, { - "number": 19, + "number": 2, "column": { - "name": "c_national_char", - "length": -1, + "name": "c_bool", + "length": 1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_bool" } }, { - "number": 20, + 
"number": 3, "column": { - "name": "c_varchar", - "length": 100, + "name": "c_boolean", + "length": 1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "tinyint" }, - "originalName": "c_varchar" + "originalName": "c_boolean" } }, { - "number": 21, + "number": 4, "column": { - "name": "c_tinytext", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "tinyint" }, - "originalName": "c_tinytext" + "originalName": "c_tinyint" } }, { - "number": 22, + "number": 5, "column": { - "name": "c_mediumtext", + "name": "c_smallint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "smallint" }, - "originalName": "c_mediumtext" + "originalName": "c_smallint" } }, { - "number": 23, + "number": 6, "column": { - "name": "c_text", + "name": "c_mediumint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "text" + "name": "mediumint" }, - "originalName": "c_text" + "originalName": "c_mediumint" } }, { - "number": 24, + "number": 7, "column": { - "name": "c_longtext", + "name": "c_int", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "longtext" + "name": "int" }, - "originalName": "c_longtext" + "originalName": "c_int" } }, { - "number": 25, + "number": 8, "column": { - "name": "c_json", + "name": "c_integer", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "json" + "name": "int" }, - "originalName": "c_json" + "originalName": "c_integer" } }, { - "number": 26, + "number": 9, "column": { - "name": "c_json_string_override", + "name": "c_bigint", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "json" + "name": "bigint" }, - "originalName": "c_json_string_override" + "originalName": "c_bigint" } }, { - "number": 27, + 
"number": 10, "column": { - "name": "c_enum", - "length": 6, + "name": "c_float", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "float" }, - "originalName": "c_enum" + "originalName": "c_float" } }, { - "number": 28, + "number": 11, "column": { - "name": "c_set", - "length": 15, + "name": "c_numeric", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" + "originalName": "c_numeric" } }, { - "number": 29, + "number": 12, "column": { - "name": "c_year", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "year" + "name": "decimal" }, - "originalName": "c_year" + "originalName": "c_decimal" } }, { - "number": 30, + "number": 13, "column": { - "name": "c_date", - "length": -1, + "name": "c_dec", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_dec" } }, { - "number": 31, + "number": 14, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_fixed", + "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_fixed" } }, { - "number": 32, + "number": 15, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_double", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "double" }, - "originalName": "c_timestamp" + "originalName": "c_double" } }, { - "number": 33, + "number": 16, "column": { - "name": "c_binary", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "binary" + "name": "double" }, - "originalName": 
"c_binary" + "originalName": "c_double_precision" } }, { - "number": 34, + "number": 17, "column": { - "name": "c_varbinary", - "length": 10, + "name": "c_char", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_char" } }, { - "number": 35, + "number": 18, "column": { - "name": "c_tinyblob", + "name": "c_nchar", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyblob" + "name": "char" }, - "originalName": "c_tinyblob" + "originalName": "c_nchar" } }, { - "number": 36, + "number": 19, "column": { - "name": "c_blob", + "name": "c_national_char", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "blob" + "name": "char" }, - "originalName": "c_blob" + "originalName": "c_national_char" } }, { - "number": 37, + "number": 20, "column": { - "name": "c_mediumblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumblob" + "name": "varchar" }, - "originalName": "c_mediumblob" + "originalName": "c_varchar" } }, { - "number": 38, + "number": 21, "column": { - "name": "c_longblob", + "name": "c_tinytext", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "longblob" + "name": "tinytext" }, - "originalName": "c_longblob" + "originalName": "c_tinytext" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, 
c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 22, "column": { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" } }, { - "number": 2, + "number": 23, "column": { - "name": "c_bool", - "length": 1, + "name": "c_text", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_bool" + "originalName": "c_text" } }, { - "number": 3, + "number": 24, "column": { - "name": "c_boolean", - "length": 1, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - } - }, - { - "number": 4, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyint" + "name": "longtext" }, - "originalName": "c_tinyint" + "originalName": "c_longtext" } }, { - "number": 5, + "number": 25, "column": { - "name": "c_smallint", + "name": "c_json", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "smallint" + "name": "json" }, - "originalName": "c_smallint" + "originalName": "c_json" } }, { - "number": 6, + "number": 26, "column": { - "name": "c_mediumint", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "mediumint" + "name": "json" }, - "originalName": "c_mediumint" + "originalName": "c_json_string_override" } }, { - "number": 7, + "number": 27, "column": { - "name": "c_int", - 
"length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_enum" }, - "originalName": "c_int" + "originalName": "c_enum" } }, { - "number": 8, + "number": 28, "column": { - "name": "c_integer", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "int" + "name": "mysql_types_c_set" }, - "originalName": "c_integer" + "originalName": "c_set" } }, { - "number": 9, + "number": 29, "column": { - "name": "c_bigint", + "name": "c_year", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "bigint" + "name": "year" }, - "originalName": "c_bigint" + "originalName": "c_year" } }, { - "number": 10, + "number": 30, "column": { - "name": "c_float", + "name": "c_date", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "float" + "name": "date" }, - "originalName": "c_float" + "originalName": "c_date" } }, { - "number": 11, + "number": 31, "column": { - "name": "c_numeric", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "datetime" }, - "originalName": "c_numeric" + "originalName": "c_datetime" } }, { - "number": 12, + "number": 32, "column": { - "name": "c_decimal", - "length": 10, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "timestamp" }, - "originalName": "c_decimal" + "originalName": "c_timestamp" } }, { - "number": 13, + "number": 33, "column": { - "name": "c_dec", - "length": 10, + "name": "c_binary", + "length": 3, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "binary" }, - "originalName": "c_dec" + "originalName": "c_binary" } }, { - "number": 14, + "number": 34, "column": { - "name": 
"c_fixed", + "name": "c_varbinary", "length": 10, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "decimal" + "name": "varbinary" }, - "originalName": "c_fixed" + "originalName": "c_varbinary" } }, { - "number": 15, + "number": 35, "column": { - "name": "c_double", + "name": "c_tinyblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "tinyblob" }, - "originalName": "c_double" + "originalName": "c_tinyblob" } }, { - "number": 16, + "number": 36, "column": { - "name": "c_double_precision", + "name": "c_blob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "double" + "name": "blob" }, - "originalName": "c_double_precision" + "originalName": "c_blob" } }, { - "number": 17, + "number": 37, "column": { - "name": "c_char", + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumblob" }, - "originalName": "c_char" + "originalName": "c_mediumblob" } }, { - "number": 18, + "number": 38, "column": { - "name": "c_nchar", + "name": "c_longblob", "length": -1, "table": { "schema": "public", "name": "mysql_types" }, "type": { - "name": "char" + "name": "longblob" }, - "originalName": "c_nchar" + "originalName": "c_longblob" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "name": "GetMysqlTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + 
"length": 1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 19, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 20, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" }, { - "number": 21, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 22, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" }, { - "number": 23, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } + "name": "c_int", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" }, { - "number": 24, - "column": { - "name": "c_longtext", - "length": -1, - "table": { 
- "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } + "name": "c_integer", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" }, { - "number": 25, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } + "name": "c_bigint", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" }, { - "number": 26, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } + "name": "c_float", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" }, { - "number": 27, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } + "name": "c_decimal", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_decimal" }, { - "number": 28, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } + "name": "c_dec", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" }, { - "number": 29, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } + "name": "c_numeric", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + 
"name": "decimal" + }, + "originalName": "c_numeric" }, { - "number": 30, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } + "name": "c_fixed", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" }, { - "number": 31, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" }, { - "number": 32, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" }, { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } + "name": "c_year", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" }, { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" }, { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": 
"tinyblob" - }, - "originalName": "c_tinyblob" - } + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" }, { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" }, { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_char", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + }, + { + "name": "c_nchar", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + }, + { + "name": "c_national_char", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + }, + { + "name": "c_varchar", + "length": 100, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_tinytext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + }, + { + "name": "c_mediumtext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "text" + }, + "originalName": 
"c_text" + }, + { + "name": "c_longtext", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + }, + { + "name": "c_json_string_override", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + }, + { + "name": "c_enum", + "length": 6, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mysql_types_c_enum" + }, + "originalName": "c_enum" + }, + { + "name": "c_set", + "length": 15, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mysql_types_c_set" + }, + "originalName": "c_set" + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" }, { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": 
-1, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } + "filename": "query.sql" }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", + "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, { "name": "c_bool", "length": 1, @@ -2494,6 +2242,17 @@ }, "originalName": "c_boolean" }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_types" + }, + 
"type": { + "name": "bit" + }, + "originalName": "c_bit" + }, { "name": "c_tinyint", "length": 3, @@ -2572,7 +2331,7 @@ "originalName": "c_float" }, { - "name": "c_decimal", + "name": "c_numeric", "length": 10, "table": { "name": "mysql_types" @@ -2580,10 +2339,10 @@ "type": { "name": "decimal" }, - "originalName": "c_decimal" + "originalName": "c_numeric" }, { - "name": "c_dec", + "name": "c_decimal", "length": 10, "table": { "name": "mysql_types" @@ -2591,10 +2350,10 @@ "type": { "name": "decimal" }, - "originalName": "c_dec" + "originalName": "c_decimal" }, { - "name": "c_numeric", + "name": "c_dec", "length": 10, "table": { "name": "mysql_types" @@ -2602,7 +2361,7 @@ "type": { "name": "decimal" }, - "originalName": "c_numeric" + "originalName": "c_dec" }, { "name": "c_fixed", @@ -2637,61 +2396,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -2825,15 +2529,48 @@ "originalName": "c_set" }, { - "name": "c_bit", - "length": 8, + "name": "c_year", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "bit" + "name": "year" }, - "originalName": "c_bit" + "originalName": "c_year" + }, + { + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_types" + 
}, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" }, { "name": "c_binary", @@ -2905,479 +2642,744 @@ "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", - "name": "GetMysqlTypesCnt", + "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", + "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ { - "name": "cnt", + "name": "max_int", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "any" } }, { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - 
"name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" + "name": "any" + } }, { - "name": "c_smallint", + "name": "max_timestamp", + "notNull": true, "length": -1, - "table": { - "name": "mysql_types" - }, + "isFuncCall": true, "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, + "name": "any" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "name": "c_mediumint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mediumint" + "name": "bigint" }, - "originalName": "c_mediumint" + "originalName": "id" }, { - "name": "c_int", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "name" }, { - "name": "c_integer", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_integer" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_bigint", + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio 
\nFROM authors\nORDER BY name\nLIMIT ? OFFSET ?", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_float", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "float" + "name": "text" }, - "originalName": "c_float" + "originalName": "name" }, { - "name": "c_numeric", - "length": 10, + "name": "bio", + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "name": "limit", + "notNull": true, + "length": -1, + "type": { + "name": "integer" + } + } }, { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - }, + "number": 2, + "column": { + "name": "offset", + "notNull": true, + "length": -1, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", + "parameters": [ { - "name": "c_fixed", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_fixed" + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + } }, { - "name": "c_double", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "double" - }, - 
"originalName": "c_double" + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double_precision" - }, + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "parameters": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_nchar", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "char" + "name": "bigint" }, - "originalName": "c_nchar" + "originalName": "id" }, { - "name": "c_national_char", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "varchar" + "name": "text" }, - "originalName": "c_varchar" + "originalName": "name" }, { - "name": "c_tinytext", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "tinytext" + "name": "text" }, - "originalName": "c_tinytext" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_mediumtext", + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mediumtext" + "name": "bigint" }, - "originalName": "c_mediumtext" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { "name": "text" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_longtext", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "longtext" + "name": "text" }, - "originalName": "c_longtext" - }, + 
"originalName": "bio" + } + ], + "parameters": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - }, + "number": 1, + "column": { + "name": "bio", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "name": "c_set", - "length": 15, + "name": "id", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "mysql_types_c_set" + "name": "bigint" }, - "originalName": "c_set" + "originalName": 
"id" }, { - "name": "c_year", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "year" + "name": "text" }, - "originalName": "c_year" + "originalName": "name" }, { - "name": "c_date", + "name": "bio", "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "date" + "name": "text" }, - "originalName": "c_date" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_datetime", - "length": 19, + "number": 1, + "column": { + "name": "ids", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "isSqlcSlice": true, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "datetime" + "name": "bigint" }, - "originalName": "c_datetime" + "originalName": "id" }, { - "name": "c_timestamp", - "length": 19, + "name": "name", + "notNull": true, + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "timestamp" + "name": "text" }, - "originalName": "c_timestamp" + "originalName": "name" }, { - "name": "c_binary", - "length": 3, + "name": "bio", + "length": -1, "table": { - "name": "mysql_types" + "name": "authors" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "isSqlcSlice": true, + "originalName": "id" + } }, { - "name": "c_varbinary", - "length": 
10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" + "number": 2, + "column": { + "name": "names", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "isSqlcSlice": true, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", + "name": "CreateBook", + "cmd": ":execlastid", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_tinyblob", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "bigint" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "books", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id 
\u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_mediumblob", + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_longblob", + "name": "authors", "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", - "cmd": ":one", + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "max_int", + "name": "id", "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "bigint" + }, + "originalName": "id" }, { - "name": "max_varchar", + "name": "name", "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "text" + }, + "originalName": "name" }, { - "name": "max_timestamp", - "notNull": true, + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" } } ], - "filename": "query.sql" - 
}, - { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", - "cmd": ":exec", "filename": "query.sql" }, { diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index 1fd01367d7f1472049eae1d9cd8c77dc6732b298..61bb166b2a590f2acb65097b797be85b9b24117a 100644 GIT binary patch delta 143 zcmaF3jPby7Mt-gztXzynLae!!#f3RiY7-5`gi0z4Qj7JAlQU9t6ZJr%8bDEz#L|+C z{32v=B~_rfAVhUxX=+g=NU@SSP>>y}c%xe`)8=Q)0_>YNF&Q&%{>C+xk!6b{*Y3#; W8kL)!WI_x#ck@{BZ=U9RniT+^#4)S@ delta 75 zcmX@Gobll@Mt-iDtXzynLae!!#f3Ri3KJ8>c#4xVQgajafP5u+Ab;bs9H!06Tt GetAuthor(GetAuthorArgs args) + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", args.CMoney); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using 
(var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); - return result; + await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -72,64 +115,114 @@ public class GetAuthorArgs throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; - public class ListAuthorsRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class ListAuthorsArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public required int Offset { get; init; } - public required int Limit { get; init; } + public bool? CBoolean { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public DateTime? CDate { get; init; } + public TimeSpan? 
CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? CInterval { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } + public Guid? CUuid { get; init; } + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } }; - public async Task> ListAuthors(ListAuthorsArgs args) + public async Task InsertPostgresTypesBatch(List args) { - var queryParams = new Dictionary(); - queryParams.Add("offset", args.Offset); - queryParams.Add("limit", args.Limit); - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { - var result = await connection.QueryAsync(ListAuthorsSql, queryParams); - return result.AsList(); + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await 
writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); + await writer.WriteAsync(row.CUuid); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } + + await writer.CompleteAsync(); } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); + await connection.CloseAsync(); + } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class CreateAuthorArgs + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public class GetPostgresTypesRow { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public DateTime? 
CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? CInterval { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } + public Guid? CUuid { get; init; } + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? CMacaddr8 { get; init; } }; - public async Task CreateAuthor(CreateAuthorArgs args) + public async Task GetPostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); return result; } } @@ -139,29 +232,45 @@ public class CreateAuthorArgs throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow - { - public required long Id { get; init; } - }; - public class CreateAuthorReturnIdArgs + private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, 
c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + public class GetPostgresTypesCntRow { - public required string Name { get; init; } - public string? Bio { get; init; } + public short? CSmallint { get; init; } + public bool? CBoolean { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? CInterval { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } + public Guid? CUuid { get; init; } + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? 
CMacaddr { get; init; } + public required long Cnt { get; init; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task GetPostgresTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + return result; } } @@ -170,29 +279,23 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; - public class GetAuthorByIdRow + private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + public class GetPostgresFunctionsRow { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } + public int? MaxInteger { get; init; } + public string? 
MaxVarchar { get; init; } + public required DateTime MaxTimestamp { get; init; } }; - public class GetAuthorByIdArgs - { - public required long Id { get; init; } - }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresFunctions() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); return result; } } @@ -202,52 +305,54 @@ public class GetAuthorByIdArgs throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; - public class GetAuthorByNamePatternRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorByNamePatternArgs - { - public string? 
NamePattern { get; init; } - }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); - return result.AsList(); + await connection.ExecuteAsync(TruncatePostgresTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; - public class DeleteAuthorArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public required string Name { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? 
CXmlStringOverride { get; init; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); } return; @@ -258,20 +363,28 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow + { + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? 
CXmlStringOverride { get; init; } + }; + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncateAuthorsSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -279,24 +392,20 @@ public async Task TruncateAuthors() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public class UpdateAuthorsArgs - { - public string? 
Bio { get; init; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -304,89 +413,117 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; - public class GetAuthorsByIdsRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorsByIdsArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + public class InsertPostgresArrayTypesArgs { - public required long[] LongArr1 { get; init; } + public byte[]? CBytea { get; init; } + public bool[]? CBooleanArray { get; init; } + public string[]? 
CTextArray { get; init; } + public int[]? CIntegerArray { get; init; } + public decimal[]? CDecimalArray { get; init; } + public DateTime[]? CDateArray { get; init; } + public DateTime[]? CTimestampArray { get; init; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("c_bytea", args.CBytea); + queryParams.Add("c_boolean_array", args.CBooleanArray); + queryParams.Add("c_text_array", args.CTextArray); + queryParams.Add("c_integer_array", args.CIntegerArray); + queryParams.Add("c_decimal_array", args.CDecimalArray); + queryParams.Add("c_date_array", args.CDateArray); + queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); - return result.AsList(); + await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; - public class GetAuthorsByIdsAndNamesRow - { - public required long Id { get; init; } - public required string Name { 
get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorsByIdsAndNamesArgs + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public required long[] LongArr1 { get; init; } - public required string[] StringArr2 { get; init; } + public byte[]? CBytea { get; init; } + public bool[]? CBooleanArray { get; init; } + public string[]? CTextArray { get; init; } + public int[]? CIntegerArray { get; init; } + public decimal[]? CDecimalArray { get; init; } + public DateTime[]? CDateArray { get; init; } + public DateTime[]? CTimestampArray { get; init; } }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + public async Task GetPostgresArrayTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); - queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); - return result.AsList(); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string CreateBookSql = "INSERT INTO books (name, 
author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs { - public required Guid Id { get; init; } + public byte[]? CBytea { get; init; } }; - public class CreateBookArgs + public async Task InsertPostgresArrayTypesBatch(List args) { - public required string Name { get; init; } - public required long AuthorId { get; init; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; + public class GetPostgresArrayTypesCntRow + { + public byte[]? 
CBytea { get; init; } + public required long Cnt { get; init; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task GetPostgresArrayTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.QuerySingleAsync(CreateBookSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + return result; } } @@ -395,202 +532,56 @@ public async Task CreateBook(CreateBookArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; - public class ListAllAuthorsBooksRow - { - public required Author? Author { get; init; } - public required Book? 
Book { get; init; } - }; - public async Task> ListAllAuthorsBooks() + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; - public class GetDuplicateAuthorsRow - { - public required Author? Author { get; init; } - public required Author? Author2 { get; init; } - }; - public async Task> GetDuplicateAuthors() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } - } - } - - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; - public class GetAuthorsByBookNameRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - public required Book? 
Book { get; init; } - }; - public class GetAuthorsByBookNameArgs - { - public required string Name { get; init; } - }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } - } - - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public class InsertPostgresTypesArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } - public Guid? CUuid { get; init; } - public NpgsqlCidr? 
CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? CCircle { get; init; } }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + 
queryParams.Add("c_path", args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); } return; @@ -601,69 +592,37 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs { - public bool? CBoolean { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? 
CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } - public Guid? CUuid { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? CCircle { get; init; } }; - public async Task InsertPostgresTypesBatch(List args) + public async Task InsertPostgresGeoTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await 
writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await writer.WriteAsync(row.CText); - await writer.WriteAsync(row.CUuid); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); } await writer.CompleteAsync(); @@ -671,91 +630,26 @@ public async Task InsertPostgresTypesBatch(List ar await connection.CloseAsync(); } - } - - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public class GetPostgresTypesRow - { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } - public Guid? CUuid { get; init; } - public NpgsqlCidr? 
CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } - }; - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); - } - - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; - public class GetPostgresTypesCntRow - { - public short? CSmallint { get; init; } - public bool? CBoolean { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? 
CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } - public Guid? CUuid { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public required long Cnt { get; init; } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? CCircle { get; init; } }; - public async Task GetPostgresTypesCnt() + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); return result; } } @@ -765,25 +659,20 @@ public class GetPostgresTypesCntRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; - public class 
GetPostgresFunctionsRow - { - public int? MaxInteger { get; init; } - public string? MaxVarchar { get; init; } - public required DateTime MaxTimestamp { get; init; } - }; - public async Task GetPostgresFunctions() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); - return result; + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -791,20 +680,31 @@ public class GetPostgresFunctionsRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + public class GetAuthorRow + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } + }; + public class GetAuthorArgs + { + public required string Name { get; init; } + }; + public async Task GetAuthor(GetAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -812,63 +712,64 @@ public async Task TruncatePostgresTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; - public class InsertPostgresUnstructuredTypesArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + public class ListAuthorsRow { - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public string? CXmlStringOverride { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } }; - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + public class ListAuthorsArgs + { + public required int Offset { get; init; } + public required int Limit { get; init; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); + queryParams.Add("offset", args.Offset); + queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + var result = await connection.QueryAsync(ListAuthorsSql, queryParams); + return result.AsList(); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; - public class GetPostgresUnstructuredTypesRow + private const 
string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow { - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public string? CXmlStringOverride { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task GetPostgresUnstructuredTypes() + public class CreateAuthorArgs + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); return result; } } @@ -878,20 +779,30 @@ public class GetPostgresUnstructuredTypesRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class 
CreateAuthorReturnIdRow + { + public required long Id { get; init; } + }; + public class CreateAuthorReturnIdArgs + { + public required string Name { get; init; } + public string? Bio { get; init; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -899,38 +810,31 @@ public async Task TruncatePostgresUnstructuredTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; - public class InsertPostgresArrayTypesArgs + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + public class GetAuthorByIdRow { - public byte[]? CBytea { get; init; } - public bool[]? CBooleanArray { get; init; } - public string[]? CTextArray { get; init; } - public int[]? CIntegerArray { get; init; } - public decimal[]? CDecimalArray { get; init; } - public DateTime[]? 
CDateArray { get; init; } - public DateTime[]? CTimestampArray { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + public class GetAuthorByIdArgs + { + public required long Id { get; init; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bytea", args.CBytea); - queryParams.Add("c_boolean_array", args.CBooleanArray); - queryParams.Add("c_text_array", args.CTextArray); - queryParams.Add("c_integer_array", args.CIntegerArray); - queryParams.Add("c_decimal_array", args.CDecimalArray); - queryParams.Add("c_date_array", args.CDateArray); - queryParams.Add("c_timestamp_array", args.CTimestampArray); + queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -938,29 +842,55 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private 
const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + public class GetAuthorByNamePatternRow { - public byte[]? CBytea { get; init; } - public bool[]? CBooleanArray { get; init; } - public string[]? CTextArray { get; init; } - public int[]? CIntegerArray { get; init; } - public decimal[]? CDecimalArray { get; init; } - public DateTime[]? CDateArray { get; init; } - public DateTime[]? CTimestampArray { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task GetPostgresArrayTypes() + public class GetAuthorByNamePatternArgs { + public string? NamePattern { get; init; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); - return result; + var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); + return result.AsList(); + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + } + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public class DeleteAuthorArgs + { + public required string Name { get; init; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + if (this.Transaction == null) + { + using (var connection = new 
NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -968,48 +898,44 @@ public class GetPostgresArrayTypesRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs - { - public byte[]? CBytea { get; init; } - }; - public async Task InsertPostgresArrayTypesBatch(List args) + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea); - } - - await writer.CompleteAsync(); + await connection.ExecuteAsync(TruncateAuthorsSql); } - await connection.CloseAsync(); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } + + await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt 
FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; - public class GetPostgresArrayTypesCntRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public class UpdateAuthorsArgs { - public byte[]? CBytea { get; init; } - public required long Cnt { get; init; } + public string? Bio { get; init; } }; - public async Task GetPostgresArrayTypesCnt() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); - return result; + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } } @@ -1018,59 +944,90 @@ public class GetPostgresArrayTypesCntRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + public class GetAuthorsByIdsRow + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } + }; + public class GetAuthorsByIdsArgs + { + public required long[] LongArr1 { get; init; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); + return result.AsList(); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + } + + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + public class GetAuthorsByIdsAndNamesRow + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } + }; + public class GetAuthorsByIdsAndNamesArgs + { + public required long[] LongArr1 { get; init; } + public required string[] StringArr2 { get; init; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("stringArr_2", args.StringArr2); + if (this.Transaction == null) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); + return result.AsList(); + } } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class InsertPostgresGeoTypesArgs + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? 
CCircle { get; init; } + public required Guid Id { get; init; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public class CreateBookArgs + { + public required string Name { get; init; } + public required long AuthorId { get; init; } + }; + public async Task CreateBook(CreateBookArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1078,94 +1035,137 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs + private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . 
id = books . author_id ORDER BY authors . name "; + public class ListAllAuthorsBooksRow { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public required Author? Author { get; init; } + public required Book? Book { get; init; } }; - public async Task InsertPostgresGeoTypesBatch(List args) + public async Task> ListAllAuthorsBooks() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public class GetDuplicateAuthorsRow { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public required Author? Author { get; init; } + public required Author? 
Author2 { get; init; } }; - public async Task GetPostgresGeoTypes() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); - return result; + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + public class GetAuthorsByBookNameRow + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } + public required Book? Book { get; init; } + }; + public class GetAuthorsByBookNameArgs + { + public required string Name { get; init; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + { + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } + } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index 9b7fdd3d..6540a548 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/authors/schema.sql", - "examples/config/postgresql/types/schema.sql" + "examples/config/postgresql/types/schema.sql", + "examples/config/postgresql/authors/schema.sql" ], "queries": [ - "examples/config/postgresql/authors/query.sql", - "examples/config/postgresql/types/query.sql" + "examples/config/postgresql/types/query.sql", + "examples/config/postgresql/authors/query.sql" ], "codegen": { "out": "examples/NpgsqlDapperExample", @@ -25,96 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": 
"books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "postgres_types" @@ -624,6 +534,96 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ] }, @@ -32473,769 +32473,417 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n 
c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_bit", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "number": 2, + "number": 3, "column": { - "name": "limit", - "notNull": true, + "name": "c_smallint", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": 
"pg_catalog.int2" + }, + "originalName": "c_smallint" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "pg_catalog.int4" }, - "originalName": "id" + "originalName": "c_integer" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_bigint", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.int8" }, - "originalName": "name" + "originalName": "c_bigint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + "name": "c_real", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "bio" + "originalName": "c_real" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": 
true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_numeric", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "name" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_decimal", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "bio" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "id", - "notNull": true, + "name": "c_money", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, 
"type": { - "name": "bigserial" + "name": "money" }, - "originalName": "id" + "originalName": "c_money" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 11, + "column": { + "name": "c_date", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 12, "column": { - "name": "name_pattern", + "name": "c_time", "length": -1, "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.time" }, - "originalName": "name" + "originalName": "c_time" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 13, "column": { - "name": "name", - "notNull": true, + "name": "c_timestamp", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { 
- "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 14, "column": { - "name": "bio", + "name": "c_timestamp_with_tz", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamptz" }, - "originalName": "bio" + "originalName": "c_timestamp_with_tz" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 15, + "column": { + "name": "c_interval", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 16, "column": { - "notNull": true, - "isArray": true, + "name": "c_char", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.bpchar" }, - "arrayDims": 1 + "originalName": "c_char" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, 
- "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 17, + "column": { + "name": "c_varchar", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 18, "column": { - "notNull": true, - "isArray": true, + "name": "c_character_varying", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.varchar" }, - "arrayDims": 1 + "originalName": "c_character_varying" } }, { - "number": 2, + "number": 19, "column": { - "notNull": true, - "isArray": true, + "name": "c_bpchar", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "text" + "name": "bpchar" }, - "arrayDims": 1 + "originalName": "c_bpchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 20, "column": { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { "name": "text" }, - "originalName": "name" + "originalName": 
"c_text" } }, { - "number": 2, + "number": 21, "column": { - "name": "author_id", - "notNull": true, + "name": "c_uuid", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_uuid" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 22, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 23, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, 
books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 24, + "column": { + "name": "c_macaddr", + "length": -1, + "type": { + "name": "macaddr" + } } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 25, "column": { - "name": "name", - "notNull": true, + "name": "c_macaddr8", "length": -1, - "table": { - "name": "books" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "macaddr8" + } } } ], - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n 
c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { "name": "c_boolean", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33248,26 +32896,9 @@ }, { "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, "column": { "name": "c_smallint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33279,11 +32910,10 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_integer", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33295,11 +32925,10 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_bigint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33311,11 +32940,10 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_real", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33327,11 +32955,10 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_numeric", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33343,11 +32970,10 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_decimal", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": 
"postgres_types" @@ -33359,386 +32985,7 @@ } }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" - } - }, - { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" - } - }, - { - "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": 
"pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 22, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 23, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 24, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 25, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } - } - ], - "comments": 
[ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } - }, - { - "number": 2, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, - { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } - }, - { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } - }, - { - "number": 6, - "column": { - "name": "c_numeric", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": 
"pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, - { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } - }, - { - "number": 8, + "number": 8, "column": { "name": "c_double_precision", "length": -1, @@ -35376,6 +34623,759 @@ "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": 
"offset", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + }, + { + "number": 2, + "column": { + "name": "limit", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + 
"column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": 
"name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": 
"GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + }, + { + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": 
":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlDapperExample/request.message 
b/examples/NpgsqlDapperExample/request.message index 265c9e1c..f6eb1d32 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -1,19 +1,9 @@ д 2 -postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb▄ +postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunner╓ч public"кpublicГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtextс +./dist/LocalRunner╓ч public"кpublicс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -88,7 +78,17 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ +c_circle0         Rpostgres_geometric_typesbcircleГ + 
authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext" pg_temp"ц▓ pg_catalogЙ & @@ -10209,98 +10209,7 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_noР -9SELECT id, name, bio FROM authors -WHERE name = $1 LIMIT 1 GetAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*0, -name0         R authorsbtextzname: query.sql║ -CSELECT id, name, bio -FROM authors -ORDER BY name -LIMIT $2 -OFFSET $1 ListAuthors:many"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*&" -offset0         8b integer*%! -limit0         8b integer: query.sqlн -OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*95 -id0         Rpublicauthorsb  bigserialzid*84 -name0         Rpublicauthorsbtextzname*40 -bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ - GetAuthor(GetAuthorArgs args) + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", 
args.CMoney); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); - return result; + await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -73,64 +116,114 @@ public async Task GetAuthor(GetAuthorArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, 
c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public int Offset { get; set; } - public int Limit { get; set; } + public bool? CBoolean { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } }; - public async Task> ListAuthors(ListAuthorsArgs args) + public async Task InsertPostgresTypesBatch(List args) { - var queryParams = new Dictionary(); - queryParams.Add("offset", args.Offset); - queryParams.Add("limit", args.Limit); - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { - var result = await connection.QueryAsync(ListAuthorsSql, queryParams); - return result.AsList(); + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); + await writer.WriteAsync(row.CUuid); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } + + await writer.CompleteAsync(); } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new 
InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); + await connection.CloseAsync(); + } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class CreateAuthorArgs + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public class GetPostgresTypesRow { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task CreateAuthor(CreateAuthorArgs args) + public async Task GetPostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); return result; } } @@ -140,29 +233,45 @@ public async Task CreateAuthor(CreateAuthorArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow - { - public long Id { get; set; } - }; - public class CreateAuthorReturnIdArgs + private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + public class 
GetPostgresTypesCntRow { - public string Name { get; set; } - public string Bio { get; set; } + public short? CSmallint { get; set; } + public bool? CBoolean { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task GetPostgresTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + return result; } } @@ -171,29 +280,23 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, 
transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; - public class GetAuthorByIdRow + private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + public class GetPostgresFunctionsRow { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public int? MaxInteger { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } }; - public class GetAuthorByIdArgs - { - public long Id { get; set; } - }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresFunctions() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); return result; } } @@ -203,52 +306,54 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs - { - public string NamePattern { get; set; } - }; - public async Task> 
GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); - return result.AsList(); + await connection.ExecuteAsync(TruncatePostgresTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; - public class DeleteAuthorArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public string Name { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? 
CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); } return; @@ -259,20 +364,28 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow + { + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? 
CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } + }; + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncateAuthorsSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -280,24 +393,20 @@ public async Task TruncateAuthors() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public class UpdateAuthorsArgs - { - public string Bio { get; set; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -305,89 +414,117 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) throw new System.InvalidOperationException("Transaction is provided, but its 
connection is null."); } - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + public class InsertPostgresArrayTypesArgs { - public long[] LongArr1 { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("c_bytea", args.CBytea); + queryParams.Add("c_boolean_array", args.CBooleanArray); + queryParams.Add("c_text_array", args.CTextArray); + queryParams.Add("c_integer_array", args.CIntegerArray); + queryParams.Add("c_decimal_array", args.CDecimalArray); + queryParams.Add("c_date_array", args.CDateArray); + queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) 
{ - var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); - return result.AsList(); + await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public long[] LongArr1 { get; set; } - public string[] StringArr2 { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + public async Task GetPostgresArrayTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); - 
queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); - return result.AsList(); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs { - public Guid Id { get; set; } + public byte[] CBytea { get; set; } }; - public class CreateBookArgs + public async Task InsertPostgresArrayTypesBatch(List args) { - public string Name { get; set; } - public long AuthorId { get; set; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM 
postgres_array_types GROUP BY c_bytea LIMIT 1 "; + public class GetPostgresArrayTypesCntRow + { + public byte[] CBytea { get; set; } + public long Cnt { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task GetPostgresArrayTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - return await connection.QuerySingleAsync(CreateBookSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + return result; } } @@ -396,202 +533,56 @@ public async Task CreateBook(CreateBookArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; - public class ListAllAuthorsBooksRow - { - public Author Author { get; set; } - public Book Book { get; set; } - }; - public async Task> ListAllAuthorsBooks() + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } + + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; - public class GetDuplicateAuthorsRow - { - public Author Author { get; set; } - public Author Author2 { get; set; } - }; - public async Task> GetDuplicateAuthors() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } - } - } - - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs - { - public string Name { get; set; } - }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } - } - - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public class InsertPostgresTypesArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + queryParams.Add("c_path", 
args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); } return; @@ -602,69 +593,37 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task InsertPostgresTypesBatch(List args) + public async Task InsertPostgresGeoTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await 
writer.WriteAsync(row.CText); - await writer.WriteAsync(row.CUuid); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); } await writer.CompleteAsync(); @@ -672,91 +631,26 @@ public async Task InsertPostgresTypesBatch(List ar await connection.CloseAsync(); } - } - - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public class GetPostgresTypesRow - { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } - }; - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); - } - - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; - public class GetPostgresTypesCntRow - { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public long Cnt { get; set; } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task GetPostgresTypesCnt() + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); return result; } } @@ -766,25 +660,20 @@ public async Task GetPostgresTypesCnt() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; - public class GetPostgresFunctionsRow - { - public int? 
MaxInteger { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } - }; - public async Task GetPostgresFunctions() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); - return result; + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -792,20 +681,31 @@ public async Task GetPostgresFunctions() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + public class GetAuthorRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorArgs + { + public string Name { get; set; } + }; + public async Task GetAuthor(GetAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + return result; } - - return; } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -813,63 +713,64 @@ public async Task TruncatePostgresTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; - public class InsertPostgresUnstructuredTypesArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + public class ListAuthorsRow { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + public class ListAuthorsArgs + { + public int Offset { get; set; } + public int Limit { get; set; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? 
args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); + queryParams.Add("offset", args.Offset); + queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + var result = await connection.QueryAsync(ListAuthorsSql, queryParams); + return result.AsList(); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; - public class GetPostgresUnstructuredTypesRow + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? 
CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresUnstructuredTypes() + public class CreateAuthorArgs + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); return result; } } @@ -879,20 +780,30 @@ public async Task GetPostgresUnstructuredTypes( throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class CreateAuthorReturnIdRow + { + public long Id { get; set; } + }; + public class CreateAuthorReturnIdArgs + { + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", 
args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -900,38 +811,31 @@ public async Task TruncatePostgresUnstructuredTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; - public class InsertPostgresArrayTypesArgs + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + public class GetAuthorByIdRow { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { var 
queryParams = new Dictionary(); - queryParams.Add("c_bytea", args.CBytea); - queryParams.Add("c_boolean_array", args.CBooleanArray); - queryParams.Add("c_text_array", args.CTextArray); - queryParams.Add("c_integer_array", args.CIntegerArray); - queryParams.Add("c_decimal_array", args.CDecimalArray); - queryParams.Add("c_date_array", args.CDateArray); - queryParams.Add("c_timestamp_array", args.CTimestampArray); + queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -939,29 +843,55 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + public class GetAuthorByNamePatternRow { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - 
public DateTime[] CTimestampArray { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresArrayTypes() + public class GetAuthorByNamePatternArgs { + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); - return result; + var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); + return result.AsList(); + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + } + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -969,48 +899,44 @@ public async Task GetPostgresArrayTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, 
transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs - { - public byte[] CBytea { get; set; } - }; - public async Task InsertPostgresArrayTypesBatch(List args) + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea); - } - - await writer.CompleteAsync(); + await connection.ExecuteAsync(TruncateAuthorsSql); } - await connection.CloseAsync(); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } + + await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; - public class GetPostgresArrayTypesCntRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public class UpdateAuthorsArgs { - public byte[] CBytea { get; set; } - public long Cnt { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresArrayTypesCnt() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { + var queryParams = new Dictionary(); + 
queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); - return result; + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } } @@ -1019,59 +945,90 @@ public async Task GetPostgresArrayTypesCnt() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + public class GetAuthorsByIdsRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsArgs + { + public long[] LongArr1 { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); + return result.AsList(); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + 
} + + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + public class GetAuthorsByIdsAndNamesRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsAndNamesArgs + { + public long[] LongArr1 { get; set; } + public string[] StringArr2 { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("stringArr_2", args.StringArr2); + if (this.Transaction == null) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); + return result.AsList(); + } } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class InsertPostgresGeoTypesArgs + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? 
CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public Guid Id { get; set; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1079,95 +1036,138 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs + private const string 
ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + public class ListAllAuthorsBooksRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public Author Author { get; set; } + public Book Book { get; set; } }; - public async Task InsertPostgresGeoTypesBatch(List args) + public async Task> ListAllAuthorsBooks() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public class GetDuplicateAuthorsRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? 
CCircle { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); - return result; + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + public class GetAuthorsByBookNameRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } + }; + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + { + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } + } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } } } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 72f60daa..5025d042 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/authors/schema.sql", - "examples/config/postgresql/types/schema.sql" + "examples/config/postgresql/types/schema.sql", + "examples/config/postgresql/authors/schema.sql" ], "queries": [ - "examples/config/postgresql/authors/query.sql", - "examples/config/postgresql/types/query.sql" + "examples/config/postgresql/types/query.sql", + "examples/config/postgresql/authors/query.sql" ], "codegen": { "out": "examples/NpgsqlDapperLegacyExample", @@ -25,96 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - 
"length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "postgres_types" @@ -624,6 +534,96 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ] }, @@ -32473,769 +32473,417 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n 
c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_bit", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "number": 2, + "number": 3, "column": { - "name": "limit", - "notNull": true, + "name": "c_smallint", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, 
"type": { - "name": "integer" - } + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "pg_catalog.int4" }, - "originalName": "id" + "originalName": "c_integer" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_bigint", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.int8" }, - "originalName": "name" + "originalName": "c_bigint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + "name": "c_real", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "bio" + "originalName": "c_real" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - 
"columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_numeric", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "name" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_decimal", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "bio" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "id", - "notNull": true, + "name": "c_money", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": 
"public", + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "money" }, - "originalName": "id" + "originalName": "c_money" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 11, + "column": { + "name": "c_date", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 12, "column": { - "name": "name_pattern", + "name": "c_time", "length": -1, "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.time" }, - "originalName": "name" + "originalName": "c_time" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 13, "column": { - "name": "name", - "notNull": true, + "name": "c_timestamp", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": 
":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 14, "column": { - "name": "bio", + "name": "c_timestamp_with_tz", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamptz" }, - "originalName": "bio" + "originalName": "c_timestamp_with_tz" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 15, + "column": { + "name": "c_interval", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 16, "column": { - "notNull": true, - "isArray": true, + "name": "c_char", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.bpchar" }, - "arrayDims": 1 + "originalName": "c_char" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - 
"name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 17, + "column": { + "name": "c_varchar", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 18, "column": { - "notNull": true, - "isArray": true, + "name": "c_character_varying", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.varchar" }, - "arrayDims": 1 + "originalName": "c_character_varying" } }, { - "number": 2, + "number": 19, "column": { - "notNull": true, - "isArray": true, + "name": "c_bpchar", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "text" + "name": "bpchar" }, - "arrayDims": 1 + "originalName": "c_bpchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 20, "column": { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { "name": "text" }, - 
"originalName": "name" + "originalName": "c_text" } }, { - "number": 2, + "number": 21, "column": { - "name": "author_id", - "notNull": true, + "name": "c_uuid", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_uuid" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 22, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 23, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n 
authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 24, + "column": { + "name": "c_macaddr", + "length": -1, + "type": { + "name": "macaddr" + } } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 25, "column": { - "name": "name", - "notNull": true, + "name": "c_macaddr8", "length": -1, - "table": { - "name": "books" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "macaddr8" + } } } ], - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n 
c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { "name": "c_boolean", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33248,26 +32896,9 @@ }, { "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, "column": { "name": "c_smallint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33279,11 +32910,10 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_integer", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33295,11 +32925,10 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_bigint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33311,11 +32940,10 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_real", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33327,11 +32955,10 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_numeric", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33343,11 +32970,10 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_decimal", "length": -1, - "isNamedParam": 
true, "table": { "schema": "public", "name": "postgres_types" @@ -33359,386 +32985,7 @@ } }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" - } - }, - { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" - } - }, - { - "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 22, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 23, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 24, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 25, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - 
"name": "macaddr8" - } - } - } - ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } - }, - { - "number": 2, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, - { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } - }, - { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } - }, - { - "number": 6, - "column": { - "name": "c_numeric", - "length": -1, - "table": { - "schema": "public", - 
"name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, - { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } - }, - { - "number": 8, + "number": 8, "column": { "name": "c_double_precision", "length": -1, @@ -35376,6 +34623,759 @@ "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + 
"parameters": [ + { + "number": 1, + "column": { + "name": "offset", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + }, + { + "number": 2, + "column": { + "name": "limit", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + 
"originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + 
"parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT 
[]) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + }, + { + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + 
"name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git 
a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 1924081a..17b5a68c 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -1,19 +1,9 @@ ╕ 2 -postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbЁ +postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbЁ "examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner╓ч public"кpublicГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtextс +./dist/LocalRunner╓ч public"кpublicс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -88,7 +78,17 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         
Rpostgres_geometric_typesbcircle" pg_temp"ц▓ +c_circle0         Rpostgres_geometric_typesbcircleГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext" pg_temp"ц▓ pg_catalogЙ & @@ -10209,98 +10209,7 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_noР -9SELECT id, name, bio FROM authors -WHERE name = $1 LIMIT 1 GetAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*0, -name0         R authorsbtextzname: query.sql║ -CSELECT id, name, bio -FROM authors -ORDER BY name -LIMIT $2 -OFFSET $1 ListAuthors:many"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*&" -offset0         8b integer*%! 
-limit0         8b integer: query.sqlн -OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*95 -id0         Rpublicauthorsb  bigserialzid*84 -name0         Rpublicauthorsbtextzname*40 -bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ - GetAuthor(GetAuthorArgs args) + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorSql)) + using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -77,91 +89,122 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; + command.CommandText = InsertPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; - public readonly record struct ListAuthorsRow(long Id, string Name, string? Bio); - public readonly record struct ListAuthorsArgs(int Offset, int Limit); - public async Task> ListAuthors(ListAuthorsArgs args) + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? 
CMacaddr); + public async Task InsertPostgresTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { - using (var command = connection.CreateCommand(ListAuthorsSql)) + foreach (var row in args) { - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); + await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? (object)DBNull.Value); + await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); } - } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAuthorsSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public readonly record struct CreateAuthorRow(long Id, string Name, string? Bio); - public readonly record struct CreateAuthorArgs(long Id, string Name, string? 
Bio); - public async Task CreateAuthor(CreateAuthorArgs args) + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + public async Task GetPostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresTypesSql)) { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? 
null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + CDate = reader.IsDBNull(10) ? null : reader.GetDateTime(10), + CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), + CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), + CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), + CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), + CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CText = reader.IsDBNull(19) ? null : reader.GetString(19), + CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), + CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -178,20 +221,39 @@ public async Task> ListAuthors(ListAuthorsArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorSql; + command.CommandText = GetPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + CDate = reader.IsDBNull(10) ? null : reader.GetDateTime(10), + CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), + CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), + CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), + CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), + CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CText = reader.IsDBNull(19) ? null : reader.GetString(19), + CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), + CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? 
null : reader.GetString(24) }; } } @@ -200,62 +262,46 @@ public async Task> ListAuthors(ListAuthorsArgs args) return null; } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public readonly record struct CreateAuthorReturnIdRow(long Id); - public readonly record struct CreateAuthorReturnIdArgs(string Name, string? Bio); - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateAuthorReturnIdSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); - } - } - - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; - public readonly record struct GetAuthorByIdRow(long Id, string Name, string? 
Bio); - public readonly record struct GetAuthorByIdArgs(long Id); - public async Task GetAuthorById(GetAuthorByIdArgs args) + private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorByIdSql)) + using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) { - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? 
null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), + CDate = reader.IsDBNull(9) ? null : reader.GetDateTime(9), + CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), + CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), + CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), + CChar = reader.IsDBNull(14) ? null : reader.GetString(14), + CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), + CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), + CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CText = reader.IsDBNull(18) ? null : reader.GetString(18), + CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CCidr = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), + CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CMacaddr = reader.IsDBNull(22) ? 
null : reader.GetFieldValue(22), + Cnt = reader.GetInt64(23) }; } } @@ -272,18 +318,38 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = GetPostgresTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), + CDate = reader.IsDBNull(9) ? null : reader.GetDateTime(9), + CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), + CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), + CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), + CChar = reader.IsDBNull(14) ? null : reader.GetString(14), + CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), + CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), + CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CText = reader.IsDBNull(18) ? null : reader.GetString(18), + CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CCidr = reader.IsDBNull(20) ? 
null : reader.GetFieldValue(20), + CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + Cnt = reader.GetInt64(23) }; } } @@ -292,57 +358,69 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; - public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); - public readonly record struct GetAuthorByNamePatternArgs(string? NamePattern); - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? MaxVarchar, DateTime MaxTimestamp); + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) + using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByNamePatternSql; + command.CommandText = GetPostgresFunctionsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } } } + + return null; } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; - public readonly record struct DeleteAuthorArgs(string Name); - public async Task DeleteAuthor(DeleteAuthorArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(DeleteAuthorSql)) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -357,22 +435,28 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = TruncatePostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncateAuthorsSql)) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -387,26 +471,52 @@ public async Task TruncateAuthors() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateAuthorsSql; + command.CommandText = InsertPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public readonly record struct UpdateAuthorsArgs(string? Bio); - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(UpdateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? 
null : reader.GetString(5) + }; + } + } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -416,112 +526,136 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = UpdateAuthorsSql; + command.CommandText = GetPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + }; + } + } } + + return null; } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; - public readonly record struct GetAuthorsByIdsRow(long Id, string Name, string? 
Bio); - public readonly record struct GetAuthorsByIdsArgs(long[] LongArr1); - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) + using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsSql; + command.CommandText = TruncatePostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; - public readonly record struct GetAuthorsByIdsAndNamesRow(long Id, string Name, string? Bio); - public readonly record struct GetAuthorsByIdsAndNamesArgs(long[] LongArr1, string[] StringArr2); - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) + using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsAndNamesSql; + command.CommandText = InsertPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public readonly record struct CreateBookRow(Guid Id); - public readonly record struct CreateBookArgs(string Name, long AuthorId); - public async Task CreateBook(CreateBookArgs args) + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public readonly record struct GetPostgresArrayTypesRow(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateBookSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } + } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -531,164 +665,149 @@ public async Task CreateBook(CreateBookArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = GetPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); - } - } - - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; - public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); - public async Task> ListAllAuthorsBooks() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var reader = await command.ExecuteReaderAsync()) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) + if (await reader.ReadAsync()) { - using (var reader = await command.ExecuteReaderAsync()) + return new GetPostgresArrayTypesRow { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; } } } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + return null; + } + + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea); + public async Task InsertPostgresArrayTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; - public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? Author2); - public async Task> GetDuplicateAuthors() + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; + public readonly record struct GetPostgresArrayTypesCntRow(byte[]? CBytea, long Cnt); + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetPostgresArrayTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; - public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? 
Book); - public readonly record struct GetAuthorsByBookNameArgs(string Name); - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = TruncatePostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public readonly record struct InsertPostgresGeoTypesArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresTypesSql)) + using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -703,72 +822,38 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresTypesSql; + command.CommandText = InsertPostgresGeoTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? 
(object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); + } + + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresGeoTypesBatchArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); + public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); } await writer.CompleteAsync(); @@ -778,47 +863,29 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); - public async Task GetPostgresTypes() + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetPostgresGeoTypesRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? 
null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) }; } } @@ -835,39 +902,21 @@ public async Task InsertPostgresTypesBatch(List ar using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesSql; + command.CommandText = GetPostgresGeoTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetPostgresGeoTypesRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? 
null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) }; } } @@ -876,46 +925,56 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; - public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? 
CMacaddr, long Cnt); - public async Task GetPostgresTypesCnt() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresGeoTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + public readonly record struct GetAuthorRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorArgs(string Name); + public async Task GetAuthor(GetAuthorArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetAuthorSql)) { + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesCntRow + return new GetAuthorRow { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? 
null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -932,38 +991,18 @@ public async Task InsertPostgresTypesBatch(List ar using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesCntSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesCntRow + return new GetAuthorRow { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? 
null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -972,74 +1011,78 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; - public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); - public async Task GetPostgresFunctions() + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + public readonly record struct ListAuthorsRow(long Id, string Name, string? Bio); + public readonly record struct ListAuthorsArgs(int Offset, int Limit); + public async Task> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) + using (var command = connection.CreateCommand(ListAuthorsSql)) { + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresFunctionsSql; + command.CommandText = ListAuthorsSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public readonly record struct CreateAuthorRow(long Id, string Name, string? Bio); + public readonly record struct CreateAuthorArgs(long Id, string Name, string? 
Bio); + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new CreateAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1049,33 +1092,45 @@ public async Task TruncatePostgresTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresTypesSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new CreateAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + }; + } + } } + + return null; } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; - public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public readonly record struct CreateAuthorReturnIdRow(long Id); + public readonly record struct CreateAuthorReturnIdArgs(string Name, string? Bio); + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) { - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1085,45 +1140,36 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresUnstructuredTypesSql; + command.CommandText = CreateAuthorReturnIdSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; - public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? 
CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); - public async Task GetPostgresUnstructuredTypes() + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + public readonly record struct GetAuthorByIdRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorByIdArgs(long Id); + public async Task GetAuthorById(GetAuthorByIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByIdSql)) { + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetAuthorByIdRow { - CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -1140,25 +1186,18 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetAuthorByIdRow { - CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -1167,52 +1206,57 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorByNamePatternArgs(string? 
NamePattern); + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresUnstructuredTypesSql; + command.CommandText = GetAuthorByNamePatternSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } } } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; - public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public readonly record struct DeleteAuthorArgs(string Name); + public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(DeleteAuthorSql)) { - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -1227,49 +1271,27 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresArrayTypesSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public readonly record struct GetPostgresArrayTypesRow(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? 
CTimestampArray); - public async Task GetPostgresArrayTypes() + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(TruncateAuthorsSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1279,154 +1301,141 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesSql; + command.CommandText = TruncateAuthorsSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } + await command.ExecuteNonQueryAsync(); } - - return null; } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea); - public async Task InsertPostgresArrayTypesBatch(List args) + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public readonly record struct UpdateAuthorsArgs(string? Bio); + public async Task UpdateAuthors(UpdateAuthorsArgs args) { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(UpdateAuthorsSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = UpdateAuthorsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; - public readonly record struct GetPostgresArrayTypesCntRow(byte[]? CBytea, long Cnt); - public async Task GetPostgresArrayTypesCnt() + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + public readonly record struct GetAuthorsByIdsRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorsByIdsArgs(long[] LongArr1); + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) { + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesCntRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesCntSql; + command.CommandText = GetAuthorsByIdsSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesCntRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + public readonly record struct GetAuthorsByIdsAndNamesRow(long Id, string Name, string? 
Bio); + public readonly record struct GetAuthorsByIdsAndNamesArgs(long[] LongArr1, string[] StringArr2); + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresArrayTypesSql; + command.CommandText = GetAuthorsByIdsAndNamesSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } } } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public readonly record struct InsertPostgresGeoTypesArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public readonly record struct CreateBookRow(Guid Id); + public readonly record struct CreateBookArgs(string Name, long AuthorId); + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(CreateBookSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1436,135 +1445,126 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresGeoTypesBatchArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); - public async Task InsertPostgresGeoTypesBatch(List args) + private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); + public async Task> ListAllAuthorsBooks() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task GetPostgresGeoTypes() + private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? 
Author2); + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresGeoTypesRow - { - CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresGeoTypesSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresGeoTypesRow - { - CPoint = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? 
Book); + public readonly record struct GetAuthorsByBookNameArgs(string Name); + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresGeoTypesSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } } \ No newline at end of file diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index a0a6f84d..4f335f7b 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/authors/schema.sql", - "examples/config/postgresql/types/schema.sql" + "examples/config/postgresql/types/schema.sql", + "examples/config/postgresql/authors/schema.sql" ], "queries": [ - "examples/config/postgresql/authors/query.sql", - "examples/config/postgresql/types/query.sql" + "examples/config/postgresql/types/query.sql", + "examples/config/postgresql/authors/query.sql" ], "codegen": { "out": "examples/NpgsqlExample", @@ -25,96 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": 
-1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "postgres_types" @@ -624,6 +534,96 @@ } } ] + }, + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ] }, @@ -32473,769 +32473,417 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n 
c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_bit", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "number": 2, + "number": 3, "column": { - "name": "limit", - "notNull": true, + "name": "c_smallint", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, 
"type": { - "name": "integer" - } + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "pg_catalog.int4" }, - "originalName": "id" + "originalName": "c_integer" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_bigint", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.int8" }, - "originalName": "name" + "originalName": "c_bigint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + "name": "c_real", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "bio" + "originalName": "c_real" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - 
"columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_numeric", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "name" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_decimal", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "bio" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "id", - "notNull": true, + "name": "c_money", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": 
"public", + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "money" }, - "originalName": "id" + "originalName": "c_money" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 11, + "column": { + "name": "c_date", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 12, "column": { - "name": "name_pattern", + "name": "c_time", "length": -1, "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.time" }, - "originalName": "name" + "originalName": "c_time" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 13, "column": { - "name": "name", - "notNull": true, + "name": "c_timestamp", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": 
":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 14, "column": { - "name": "bio", + "name": "c_timestamp_with_tz", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamptz" }, - "originalName": "bio" + "originalName": "c_timestamp_with_tz" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 15, + "column": { + "name": "c_interval", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 16, "column": { - "notNull": true, - "isArray": true, + "name": "c_char", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.bpchar" }, - "arrayDims": 1 + "originalName": "c_char" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - 
"name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 17, + "column": { + "name": "c_varchar", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 18, "column": { - "notNull": true, - "isArray": true, + "name": "c_character_varying", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.varchar" }, - "arrayDims": 1 + "originalName": "c_character_varying" } }, { - "number": 2, + "number": 19, "column": { - "notNull": true, - "isArray": true, + "name": "c_bpchar", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "text" + "name": "bpchar" }, - "arrayDims": 1 + "originalName": "c_bpchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 20, "column": { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { "name": "text" }, - 
"originalName": "name" + "originalName": "c_text" } }, { - "number": 2, + "number": 21, "column": { - "name": "author_id", - "notNull": true, + "name": "c_uuid", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_uuid" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 22, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 23, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n 
authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 24, + "column": { + "name": "c_macaddr", + "length": -1, + "type": { + "name": "macaddr" + } } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 25, "column": { - "name": "name", - "notNull": true, + "name": "c_macaddr8", "length": -1, - "table": { - "name": "books" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "macaddr8" + } } } ], - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n 
c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { "name": "c_boolean", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33248,26 +32896,9 @@ }, { "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, "column": { "name": "c_smallint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33279,11 +32910,10 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_integer", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33295,11 +32925,10 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_bigint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33311,11 +32940,10 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_real", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33327,11 +32955,10 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_numeric", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33343,11 +32970,10 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_decimal", "length": -1, - "isNamedParam": 
true, "table": { "schema": "public", "name": "postgres_types" @@ -33359,386 +32985,7 @@ } }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" - } - }, - { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" - } - }, - { - "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 22, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 23, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 24, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 25, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - 
"name": "macaddr8" - } - } - } - ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } - }, - { - "number": 2, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, - { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } - }, - { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } - }, - { - "number": 6, - "column": { - "name": "c_numeric", - "length": -1, - "table": { - "schema": "public", - 
"name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, - { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } - }, - { - "number": 8, + "number": 8, "column": { "name": "c_double_precision", "length": -1, @@ -35376,6 +34623,759 @@ "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + 
"parameters": [ + { + "number": 1, + "column": { + "name": "offset", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + }, + { + "number": 2, + "column": { + "name": "limit", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + 
"originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + 
"parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT 
[]) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + }, + { + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + 
"name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git 
a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 755f93db..52fe019a 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -1,19 +1,9 @@ Щ 2 -postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb╤ +postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunner╓ч public"кpublicГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtextс +./dist/LocalRunner╓ч public"кpublicс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -88,7 +78,17 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ +c_circle0         
Rpostgres_geometric_typesbcircleГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext" pg_temp"ц▓ pg_catalogЙ & @@ -10209,98 +10209,7 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_noР -9SELECT id, name, bio FROM authors -WHERE name = $1 LIMIT 1 GetAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*0, -name0         R authorsbtextzname: query.sql║ -CSELECT id, name, bio -FROM authors -ORDER BY name -LIMIT $2 -OFFSET $1 ListAuthors:many"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*&" -offset0         8b integer*%! -limit0         8b integer: query.sqlн -OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*95 -id0         Rpublicauthorsb  bigserialzid*84 -name0         Rpublicauthorsbtextzname*40 -bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ - GetAuthor(GetAuthorArgs args) + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorSql)) + using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = 
reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -86,219 +117,174 @@ public async Task GetAuthor(GetAuthorArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } - } - - return null; - } - - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs - { - public int Offset { get; set; } - public int Limit { get; set; } - }; - public async Task> ListAuthors(ListAuthorsArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(ListAuthorsSql)) - { - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAuthorsSql; + command.CommandText = InsertPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class CreateAuthorArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public bool? CBoolean { get; set; } + public short? 
CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } }; - public async Task CreateAuthor(CreateAuthorArgs args) + public async Task InsertPostgresTypesBatch(List args) { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(CreateAuthorSql)) - { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new CreateAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) + using (var connection = new NpgsqlConnection(ConnectionString)) { - command.CommandText = CreateAuthorSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { - if (await reader.ReadAsync()) + foreach (var row in args) { - return new CreateAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); + await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? (object)DBNull.Value); + await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); } - } - } - - return null; - } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow - { - public long Id { get; set; } - }; - public class CreateAuthorReturnIdArgs - { - public string Name { get; set; } - public string Bio { get; set; } - }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); - } + await writer.CompleteAsync(); } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateAuthorReturnIdSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); + await connection.CloseAsync(); } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + public class GetPostgresTypesRow { - public long Id { get; set; } + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? 
CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorByIdSql)) + using (var command = connection.CreateCommand(GetPostgresTypesSql)) { - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDate = reader.IsDBNull(10) ? (DateTime? 
)null : reader.GetDateTime(10), + CTime = reader.IsDBNull(11) ? (TimeSpan? )null : reader.GetFieldValue(11), + CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), + CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), + CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), + CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CText = reader.IsDBNull(19) ? null : reader.GetString(19), + CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), + CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) }; } } @@ -315,18 +301,39 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = GetPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? 
(float? )null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDate = reader.IsDBNull(10) ? (DateTime? )null : reader.GetDateTime(10), + CTime = reader.IsDBNull(11) ? (TimeSpan? )null : reader.GetFieldValue(11), + CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), + CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), + CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), + CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CText = reader.IsDBNull(19) ? null : reader.GetString(19), + CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), + CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), + CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr8 = reader.IsDBNull(24) ? 
null : reader.GetString(24) }; } } @@ -335,73 +342,159 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs + private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + public class GetPostgresTypesCntRow { - public string NamePattern { get; set; } + public short? CSmallint { get; set; } + public bool? CBoolean { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public Guid? CUuid { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) + using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), + CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), + CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), + CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), + CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), + CInterval = reader.IsDBNull(13) ? (TimeSpan? 
)null : reader.GetFieldValue(13), + CChar = reader.IsDBNull(14) ? null : reader.GetString(14), + CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), + CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), + CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CText = reader.IsDBNull(18) ? null : reader.GetString(18), + CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), + CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? )null : reader.GetFieldValue(20), + CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + Cnt = reader.GetInt64(23) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByNamePatternSql; + command.CommandText = GetPostgresTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? (long? 
)null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), + CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), + CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), + CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), + CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), + CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), + CChar = reader.IsDBNull(14) ? null : reader.GetString(14), + CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), + CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), + CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CText = reader.IsDBNull(18) ? null : reader.GetString(18), + CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), + CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? )null : reader.GetFieldValue(20), + CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), + CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + Cnt = reader.GetInt64(23) + }; + } } } + + return null; } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; - public class DeleteAuthorArgs + private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + public class GetPostgresFunctionsRow { - public string Name { get; set; } + public int? 
MaxInteger { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(DeleteAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) { - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -411,21 +504,33 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = GetPostgresFunctionsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } + + return null; } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncateAuthorsSql)) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -441,29 +546,41 @@ public async Task TruncateAuthors() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateAuthorsSql; + command.CommandText = TruncatePostgresTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; - public class UpdateAuthorsArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + public class InsertPostgresUnstructuredTypesArgs { - public string Bio { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? 
CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(UpdateAuthorsSql)) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -473,136 +590,156 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = UpdateAuthorsSql; + command.CommandText = InsertPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + public class GetPostgresUnstructuredTypesRow { - public long[] LongArr1 { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? 
CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? 
null : reader.GetString(5) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsSql; + command.CommandText = GetPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? 
null : reader.GetString(5) + }; + } } } + + return null; } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public long[] LongArr1 { get; set; } - public string[] StringArr2 { get; set; } - }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) + using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsAndNamesSql; + command.CommandText = TruncatePostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public Guid Id { get; set; } - }; - public class CreateBookArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + public class InsertPostgresArrayTypesArgs { - public string Name { get; set; } - public long AuthorId { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateBookSql)) + using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -612,208 +749,226 @@ public async Task CreateBook(CreateBookArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = InsertPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . 
id = books . author_id ORDER BY authors . name "; - public class ListAllAuthorsBooksRow + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public Author Author { get; set; } - public Book Book { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? 
null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } } } } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresArrayTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; + } + } } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + return null; + } + + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs + { + public byte[] CBytea { get; set; } + }; + public async Task InsertPostgresArrayTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; - public class GetDuplicateAuthorsRow + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; + public class GetPostgresArrayTypesCntRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public byte[] CBytea { get; set; } + public long Cnt { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetPostgresArrayTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs - { - public string Name { get; set; } - }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = TruncatePostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; - public class InsertPostgresTypesArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + public class InsertPostgresGeoTypesArgs { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresTypesSql)) + using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -826,99 +981,49 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertPostgresTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs - { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? 
CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresGeoTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task InsertPostgresTypesBatch(List args) + public async Task InsertPostgresGeoTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? 
(object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? 
(object)DBNull.Value); } await writer.CompleteAsync(); @@ -928,74 +1033,38 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; - public class GetPostgresTypesRow + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? 
CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task GetPostgresTypes() + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetPostgresGeoTypesRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? (DateTime? )null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? (TimeSpan? )null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? 
null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) }; } } @@ -1012,39 +1081,21 @@ public async Task GetPostgresTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesSql; + command.CommandText = GetPostgresGeoTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetPostgresGeoTypesRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? 
)null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? (DateTime? )null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? (TimeSpan? )null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? 
)null : reader.GetFieldValue(6) }; } } @@ -1053,72 +1104,64 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; - public class GetPostgresTypesCntRow - { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public long Cnt { get; set; } - }; - public async Task GetPostgresTypesCnt() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? 
null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? )null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresGeoTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + public class GetAuthorRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorArgs + { + public string Name { get; set; } + }; + public async Task GetAuthor(GetAuthorArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetAuthorSql)) + { + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -1135,38 +1178,18 @@ public async Task GetPostgresTypesCnt() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesCntSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesCntRow + return new GetAuthorRow { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? 
)null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -1175,79 +1198,97 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; - public class GetPostgresFunctionsRow + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + public class ListAuthorsRow { - public int? MaxInteger { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresFunctions() + public class ListAuthorsArgs + { + public int Offset { get; set; } + public int Limit { get; set; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) + using (var command = connection.CreateCommand(ListAuthorsSql)) { + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresFunctionsSql; + command.CommandText = ListAuthorsSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class CreateAuthorArgs + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new CreateAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1257,41 +1298,52 @@ public async Task TruncatePostgresTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresTypesSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new CreateAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } + } } + + return null; } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; - public class InsertPostgresUnstructuredTypesArgs + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class CreateAuthorReturnIdRow { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? 
CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public long Id { get; set; } }; - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + public class CreateAuthorReturnIdArgs + { + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) { - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1301,53 +1353,44 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresUnstructuredTypesSql; + command.CommandText = CreateAuthorReturnIdSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; - public class GetPostgresUnstructuredTypesRow + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + public class GetAuthorByIdRow { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? 
CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresUnstructuredTypes() + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByIdSql)) { + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresUnstructuredTypesRow - { - CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + if (await reader.ReadAsync()) + { + return new GetAuthorByIdRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -1364,25 +1407,18 @@ public async Task GetPostgresUnstructuredTypes( using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetAuthorByIdRow { - CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -1391,15 +1427,68 @@ public async Task GetPostgresUnstructuredTypes( return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + public class GetAuthorByNamePatternRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorByNamePatternArgs + { + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) + { + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorByNamePatternSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? 
(object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(DeleteAuthorSql)) { + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -1414,38 +1503,22 @@ public async Task TruncatePostgresUnstructuredTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresUnstructuredTypesSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; - public class InsertPostgresArrayTypesArgs - { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } - }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const 
string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(TruncateAuthorsSql)) { - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1460,58 +1533,29 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresArrayTypesSql; + command.CommandText = TruncateAuthorsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + public class UpdateAuthorsArgs { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresArrayTypes() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(UpdateAuthorsSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1521,124 +1565,136 @@ public async Task GetPostgresArrayTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesSql; + command.CommandText = UpdateAuthorsSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } - - return null; } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + public class GetAuthorsByIdsRow { - public byte[] CBytea { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresArrayTypesBatch(List args) + public class GetAuthorsByIdsArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public long[] LongArr1 { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + { + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorsByIdsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea , COUNT (* ) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1 "; - public class GetPostgresArrayTypesCntRow + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + public class GetAuthorsByIdsAndNamesRow { - public byte[] CBytea { get; set; } - public long Cnt { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresArrayTypesCnt() + public class GetAuthorsByIdsAndNamesArgs + { + public long[] LongArr1 { get; set; } + public string[] StringArr2 { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) { - using (var reader = await 
command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesCntRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesCntSql; + command.CommandText = GetAuthorsByIdsAndNamesSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesCntRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow + { + public Guid Id { get; set; } + }; + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) + using (var command = connection.CreateCommand(CreateBookSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1648,199 +1704,143 @@ public async Task TruncatePostgresArrayTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresArrayTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; - public class 
InsertPostgresGeoTypesArgs + private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + public class ListAllAuthorsBooksRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public Author Author { get; set; } + public Book Book { get; set; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = ListAllAuthorsBooksSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs - { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? 
CCircle { get; set; } - }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var reader = await command.ExecuteReaderAsync()) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; } - - await connection.CloseAsync(); } } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + public class GetDuplicateAuthorsRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? 
CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresGeoTypesRow - { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresGeoTypesSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresGeoTypesRow - { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . 
author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + public class GetAuthorsByBookNameRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } + }; + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresGeoTypesSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } } diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index e5a51dac..c91f471c 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/authors/schema.sql", - "examples/config/postgresql/types/schema.sql" + "examples/config/postgresql/types/schema.sql", + "examples/config/postgresql/authors/schema.sql" ], "queries": [ - "examples/config/postgresql/authors/query.sql", - "examples/config/postgresql/types/query.sql" + "examples/config/postgresql/types/query.sql", + "examples/config/postgresql/authors/query.sql" ], "codegen": { "out": "examples/NpgsqlLegacyExample", @@ -25,96 +25,6 @@ { "name": "public", "tables": [ - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] - }, { "rel": { "name": "postgres_types" @@ -624,6 +534,96 @@ } } ] + }, + { + "rel": 
{ + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] } ] }, @@ -32473,769 +32473,417 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n 
c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_bit", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "number": 2, + "number": 3, "column": { - "name": "limit", - "notNull": true, + "name": "c_smallint", "length": -1, "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "integer" - } + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, 
bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 4, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "pg_catalog.int4" }, - "originalName": "id" + "originalName": "c_integer" } }, { - "number": 2, + "number": 5, "column": { - "name": "name", - "notNull": true, + "name": "c_bigint", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.int8" }, - "originalName": "name" + "originalName": "c_bigint" } }, { - "number": 3, + "number": 6, "column": { - "name": "bio", + "name": "c_real", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "bio" + "originalName": "c_real" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], - 
"parameters": [ + }, { - "number": 1, + "number": 7, "column": { - "name": "name", - "notNull": true, + "name": "c_numeric", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "name" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 8, "column": { - "name": "bio", + "name": "c_decimal", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.numeric" }, - "originalName": "bio" + "originalName": "c_decimal" } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 10, "column": { - "name": "id", - "notNull": true, + "name": "c_money", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "bigserial" + "name": "money" }, - "originalName": "id" + "originalName": "c_money" } - } - ], - "filename": 
"query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 11, + "column": { + "name": "c_date", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 12, "column": { - "name": "name_pattern", + "name": "c_time", "length": -1, "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.time" }, - "originalName": "name" + "originalName": "c_time" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 13, "column": { - "name": "name", - "notNull": true, + "name": "c_timestamp", "length": -1, + "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": 
[ + }, { - "number": 1, + "number": 14, "column": { - "name": "bio", + "name": "c_timestamp_with_tz", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "authors" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamptz" }, - "originalName": "bio" + "originalName": "c_timestamp_with_tz" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 15, + "column": { + "name": "c_interval", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 16, "column": { - "notNull": true, - "isArray": true, + "name": "c_char", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.bpchar" }, - "arrayDims": 1 + "originalName": "c_char" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" }, { - "name": "name", - 
"notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 17, + "column": { + "name": "c_varchar", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 18, "column": { - "notNull": true, - "isArray": true, + "name": "c_character_varying", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "pg_catalog.varchar" }, - "arrayDims": 1 + "originalName": "c_character_varying" } }, { - "number": 2, + "number": 19, "column": { - "notNull": true, - "isArray": true, + "name": "c_bpchar", "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, "type": { - "name": "text" + "name": "bpchar" }, - "arrayDims": 1 + "originalName": "c_bpchar" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 20, "column": { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { "name": "text" }, - "originalName": "name" + "originalName": "c_text" } }, { - "number": 2, + "number": 21, "column": { - "name": "author_id", - "notNull": true, + "name": "c_uuid", "length": 
-1, + "isNamedParam": true, "table": { "schema": "public", - "name": "books" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_uuid" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 22, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 23, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = 
$1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 24, + "column": { + "name": "c_macaddr", + "length": -1, + "type": { + "name": "macaddr" + } } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 25, "column": { - "name": "name", - "notNull": true, + "name": "c_macaddr8", "length": -1, - "table": { - "name": "books" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "macaddr8" + } } } ], - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n 
c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { "name": "c_boolean", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33248,26 +32896,9 @@ }, { "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, "column": { "name": "c_smallint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33279,11 +32910,10 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_integer", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33295,11 +32925,10 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_bigint", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33311,11 +32940,10 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_real", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33327,11 +32955,10 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_numeric", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33343,11 +32970,10 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_decimal", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" @@ -33359,386 +32985,7 @@ } }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - 
"isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" - } - }, - { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" - } - }, - { - "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - 
"isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 22, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 23, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 24, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 25, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } - } - ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT 
INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } - }, - { - "number": 2, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, - { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } - }, - { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } - }, - { - "number": 6, - "column": { - "name": "c_numeric", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, - { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, 
- "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } - }, - { - "number": 8, + "number": 8, "column": { "name": "c_double_precision", "length": -1, @@ -35376,6 +34623,759 @@ "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "offset", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + }, + { + "number": 2, + 
"column": { + "name": "limit", + "notNull": true, + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": 
{ + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": 
"name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "public", + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": 
{ + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + }, + { + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + }, + { + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "books", + "length": -1, + 
"type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + }, + { + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index bdb24833..491f45f0 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -1,19 +1,9 @@ 
н 2 -postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbх +postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner╓ч public"кpublicГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtextс +./dist/LocalRunner╓ч public"кpublicс postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -88,7 +78,17 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" pg_temp"ц▓ +c_circle0         Rpostgres_geometric_typesbcircleГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         
Rbooksb +pg_catalogint8) + description0         Rbooksbtext" pg_temp"ц▓ pg_catalogЙ & @@ -10209,98 +10209,7 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_noР -9SELECT id, name, bio FROM authors -WHERE name = $1 LIMIT 1 GetAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*0, -name0         R authorsbtextzname: query.sql║ -CSELECT id, name, bio -FROM authors -ORDER BY name -LIMIT $2 -OFFSET $1 ListAuthors:many"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*&" -offset0         8b integer*%! -limit0         8b integer: query.sqlн -OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- -id0         R authorsb  bigserialzid", -name0         R authorsbtextzname"( -bio0         R authorsbtextzbio*95 -id0         Rpublicauthorsb  bigserialzid*84 -name0         Rpublicauthorsbtextzname*40 -bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ - /docker-entrypoint-initdb.d/schema.sql diff --git a/examples/config/mysql/authors/query.sql b/examples/config/mysql/authors/query.sql new file mode 100644 index 00000000..eb16646b --- /dev/null +++ b/examples/config/mysql/authors/query.sql @@ -0,0 +1,66 @@ +-- name: GetAuthor :one +SELECT * FROM authors WHERE name = ? LIMIT 1; + +-- name: ListAuthors :many +SELECT * +FROM authors +ORDER BY name +LIMIT ? OFFSET ?; + +-- name: CreateAuthor :exec +INSERT INTO authors (id, name, bio) VALUES (?, ?, ?); + +-- name: CreateAuthorReturnId :execlastid +INSERT INTO authors (name, bio) VALUES (?, ?); + +-- name: GetAuthorById :one +SELECT * FROM authors WHERE id = ? 
LIMIT 1; + +-- name: GetAuthorByNamePattern :many +SELECT * FROM authors +WHERE name LIKE COALESCE(sqlc.narg('name_pattern'), '%'); + +-- name: DeleteAuthor :exec +DELETE FROM authors +WHERE name = ?; + +-- name: DeleteAllAuthors :exec +DELETE FROM authors; + +-- name: UpdateAuthors :execrows +UPDATE authors +SET bio = sqlc.arg('bio') +WHERE bio IS NOT NULL; + +-- name: GetAuthorsByIds :many +SELECT * FROM authors WHERE id IN (sqlc.slice('ids')); + +-- name: GetAuthorsByIdsAndNames :many +SELECT * FROM authors WHERE id IN (sqlc.slice('ids')) AND name IN (sqlc.slice('names')); + +-- name: CreateBook :execlastid +INSERT INTO books (name, author_id) VALUES (?, ?); + +-- name: ListAllAuthorsBooks :many +SELECT sqlc.embed(authors), sqlc.embed(books) +FROM authors JOIN books ON authors.id = books.author_id +ORDER BY authors.name; + +-- name: GetDuplicateAuthors :many +SELECT sqlc.embed(authors1), sqlc.embed(authors2) +FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name +WHERE authors1.id < authors2.id; + +-- name: GetAuthorsByBookName :many +SELECT authors.*, sqlc.embed(books) +FROM authors JOIN books ON authors.id = books.author_id +WHERE books.name = ?; + +-- name: CreateExtendedBio :exec +INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?); + +-- name: GetFirstExtendedBioByType :one +SELECT * FROM extended.bios WHERE bio_type = ? 
LIMIT 1; + +-- name: TruncateExtendedBios :exec +TRUNCATE TABLE extended.bios; \ No newline at end of file diff --git a/examples/config/mysql/authors/schema.sql b/examples/config/mysql/authors/schema.sql new file mode 100644 index 00000000..290a188a --- /dev/null +++ b/examples/config/mysql/authors/schema.sql @@ -0,0 +1,23 @@ +CREATE TABLE authors ( + id BIGINT PRIMARY KEY AUTO_INCREMENT, + name TEXT NOT NULL, + bio TEXT +); + +CREATE TABLE books ( + id BIGINT PRIMARY KEY AUTO_INCREMENT, + name TEXT NOT NULL, + author_id BIGINT NOT NULL, + description TEXT, + FOREIGN KEY (author_id) REFERENCES authors (id) ON DELETE CASCADE +); + +CREATE SCHEMA extended; + +CREATE TABLE extended.bios ( + author_name VARCHAR(100), + name VARCHAR(100), + bio_type ENUM('Autobiography', 'Biography', 'Memoir'), + author_type SET('Author', 'Editor', 'Translator'), + PRIMARY KEY (author_name, name) +); diff --git a/examples/config/mysql/query.sql b/examples/config/mysql/query.sql deleted file mode 100644 index 570f4a33..00000000 --- a/examples/config/mysql/query.sql +++ /dev/null @@ -1,108 +0,0 @@ --- name: GetAuthor :one -SELECT * FROM authors WHERE name = ? LIMIT 1; - --- name: ListAuthors :many -SELECT * -FROM authors -ORDER BY name -LIMIT ? OFFSET ?; - --- name: CreateAuthor :exec -INSERT INTO authors (id, name, bio) VALUES (?, ?, ?); - --- name: CreateAuthorReturnId :execlastid -INSERT INTO authors (name, bio) VALUES (?, ?); - --- name: GetAuthorById :one -SELECT * FROM authors WHERE id = ? 
LIMIT 1; - --- name: GetAuthorByNamePattern :many -SELECT * FROM authors -WHERE name LIKE COALESCE(sqlc.narg('name_pattern'), '%'); - --- name: DeleteAuthor :exec -DELETE FROM authors -WHERE name = ?; - --- name: DeleteAllAuthors :exec -DELETE FROM authors; - --- name: UpdateAuthors :execrows -UPDATE authors -SET bio = sqlc.arg('bio') -WHERE bio IS NOT NULL; - --- name: GetAuthorsByIds :many -SELECT * FROM authors WHERE id IN (sqlc.slice('ids')); - --- name: GetAuthorsByIdsAndNames :many -SELECT * FROM authors WHERE id IN (sqlc.slice('ids')) AND name IN (sqlc.slice('names')); - --- name: CreateBook :execlastid -INSERT INTO books (name, author_id) VALUES (?, ?); - --- name: ListAllAuthorsBooks :many -SELECT sqlc.embed(authors), sqlc.embed(books) -FROM authors JOIN books ON authors.id = books.author_id -ORDER BY authors.name; - --- name: GetDuplicateAuthors :many -SELECT sqlc.embed(authors1), sqlc.embed(authors2) -FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name -WHERE authors1.id < authors2.id; - --- name: GetAuthorsByBookName :many -SELECT authors.*, sqlc.embed(books) -FROM authors JOIN books ON authors.id = books.author_id -WHERE books.name = ?; - --- name: InsertMysqlTypes :exec -INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); - --- name: InsertMysqlTypesBatch :copyfrom -INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, 
c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); - --- name: GetMysqlTypes :one -SELECT * FROM mysql_types LIMIT 1; - --- name: GetMysqlTypesCnt :one -SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob -FROM mysql_types -GROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob -LIMIT 1; - --- name: GetMysqlFunctions :one -SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_types; - --- name: TruncateMysqlTypes :exec -TRUNCATE TABLE mysql_types; - --- name: CreateExtendedBio :exec -INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?); - --- name: GetFirstExtendedBioByType :one -SELECT * FROM extended.bios WHERE bio_type = ? 
LIMIT 1; - --- name: TruncateExtendedBios :exec -TRUNCATE TABLE extended.bios; \ No newline at end of file diff --git a/examples/config/mysql/types/query.sql b/examples/config/mysql/types/query.sql new file mode 100644 index 00000000..7b189a22 --- /dev/null +++ b/examples/config/mysql/types/query.sql @@ -0,0 +1,150 @@ +-- name: InsertMysqlTypes :exec +INSERT INTO mysql_types +( + c_bit, + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp, + c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: InsertMysqlTypesBatch :copyfrom +INSERT INTO mysql_types +( + c_bit, + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp, + c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: GetMysqlTypes :one +SELECT * FROM mysql_types LIMIT 1; + +-- name: GetMysqlTypesCnt :one +SELECT + COUNT(*) AS cnt, + c_bool, + c_boolean, + c_bit, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision, + c_char, + c_nchar, + 
c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +FROM mysql_types +GROUP BY + c_bool, + c_boolean, + c_bit, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp, + c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +LIMIT 1; + +-- name: GetMysqlFunctions :one +SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_types; + +-- name: TruncateMysqlTypes :exec +TRUNCATE TABLE mysql_types; diff --git a/examples/config/mysql/schema.sql b/examples/config/mysql/types/schema.sql similarity index 70% rename from examples/config/mysql/schema.sql rename to examples/config/mysql/types/schema.sql index 9094a7d8..9a120c8f 100644 --- a/examples/config/mysql/schema.sql +++ b/examples/config/mysql/types/schema.sql @@ -1,17 +1,3 @@ -CREATE TABLE authors ( - id BIGINT PRIMARY KEY AUTO_INCREMENT, - name TEXT NOT NULL, - bio TEXT -); - -CREATE TABLE books ( - id BIGINT PRIMARY KEY AUTO_INCREMENT, - name TEXT NOT NULL, - author_id BIGINT NOT NULL, - description TEXT, - FOREIGN KEY (author_id) REFERENCES authors (id) ON DELETE CASCADE -); - CREATE TABLE mysql_types ( /* Boolean data types - TINYINT(1) synonyms */ c_bool BOOL, @@ -65,14 +51,4 @@ CREATE TABLE mysql_types ( c_blob BLOB, c_mediumblob MEDIUMBLOB, c_longblob LONGBLOB -); - -CREATE SCHEMA extended; - -CREATE TABLE extended.bios ( - author_name VARCHAR(100), - name VARCHAR(100), - bio_type ENUM ('Autobiography', 'Biography', 
'Memoir'), - author_type SET ('Author', 'Editor', 'Translator'), - PRIMARY KEY (author_name, name) -); +); \ No newline at end of file diff --git a/sqlc.ci.yaml b/sqlc.ci.yaml index 0fccef64..b0c1ae6c 100644 --- a/sqlc.ci.yaml +++ b/sqlc.ci.yaml @@ -6,8 +6,8 @@ plugins: sha256: sql: # PostgresSQL - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -42,8 +42,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -78,8 +78,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -114,8 +114,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", 
"examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -152,8 +152,8 @@ sql: notNull: false # MySQL - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -180,8 +180,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -208,8 +208,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -236,8 +236,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp diff --git 
a/sqlc.local.generated.yaml b/sqlc.local.generated.yaml index 9de73108..1808008c 100644 --- a/sqlc.local.generated.yaml +++ b/sqlc.local.generated.yaml @@ -5,8 +5,8 @@ plugins: cmd: ./dist/LocalRunner sql: # PostgresSQL - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -41,8 +41,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -77,8 +77,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -113,8 +113,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: 
["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -150,8 +150,8 @@ sql: type: "string" notNull: false # MySQL - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -178,8 +178,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -206,8 +206,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -234,8 +234,8 @@ sql: csharp_type: type: "string" notNull: false - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp diff --git a/sqlc.request.generated.yaml 
b/sqlc.request.generated.yaml index 50ed40ee..50a36241 100644 --- a/sqlc.request.generated.yaml +++ b/sqlc.request.generated.yaml @@ -5,8 +5,8 @@ plugins: cmd: ./dist/LocalRunner sql: # PostgresSQL - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -42,8 +42,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -79,8 +79,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: ["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -116,8 +116,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: ["examples/config/postgresql/authors/schema.sql", "examples/config/postgresql/types/schema.sql"] - queries: 
["examples/config/postgresql/authors/query.sql", "examples/config/postgresql/types/query.sql"] + - schema: ["examples/config/postgresql/types/schema.sql", "examples/config/postgresql/authors/schema.sql"] + queries: ["examples/config/postgresql/types/query.sql", "examples/config/postgresql/authors/query.sql"] engine: "postgresql" codegen: - plugin: csharp @@ -154,8 +154,8 @@ sql: notNull: false debugRequest: true # MySQL - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -183,8 +183,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/authors/query.sql", "examples/config/mysql/types/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -212,8 +212,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp @@ -241,8 +241,8 @@ sql: type: "string" notNull: false debugRequest: true - - schema: "examples/config/mysql/schema.sql" - queries: "examples/config/mysql/query.sql" + - schema: ["examples/config/mysql/types/schema.sql", "examples/config/mysql/authors/schema.sql"] + queries: ["examples/config/mysql/types/query.sql", "examples/config/mysql/authors/query.sql"] engine: "mysql" codegen: - plugin: csharp From 
3ac7709f49de8e54ff2969f7918705f74818cd53 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 13:10:30 +0200 Subject: [PATCH 07/33] fix: sql constant bad whitespace transformation --- CodeGenerator/Generators/QueriesGen.cs | 15 +- .../EndToEndScaffold/Templates/MySqlTests.cs | 18 +- .../MySqlConnectorDapperTester.cs | 1 + .../MySqlConnectorDapperTester.generated.cs | 18 +- end2end/EndToEndTests/MySqlConnectorTester.cs | 1 + .../MySqlConnectorTester.generated.cs | 18 +- .../MySqlConnectorDapperTester.cs | 1 + .../MySqlConnectorDapperTester.generated.cs | 18 +- .../MySqlConnectorTester.cs | 1 + .../MySqlConnectorTester.generated.cs | 18 +- .../MySqlConnectorDapperExample/Models.cs | 3 + .../MySqlConnectorDapperExample/QuerySql.cs | 230 ++++- .../MySqlConnectorDapperExample/request.json | 940 ++++++++++-------- .../request.message | Bin 21422 -> 22751 bytes .../Models.cs | 3 + .../QuerySql.cs | 228 ++++- .../request.json | 940 ++++++++++-------- .../request.message | 473 ++++++--- examples/MySqlConnectorExample/Models.cs | 3 +- examples/MySqlConnectorExample/QuerySql.cs | 438 +++++--- examples/MySqlConnectorExample/request.json | 940 ++++++++++-------- .../MySqlConnectorExample/request.message | Bin 21406 -> 22735 bytes .../MySqlConnectorLegacyExample/Models.cs | 3 + .../MySqlConnectorLegacyExample/QuerySql.cs | 493 ++++++--- .../MySqlConnectorLegacyExample/request.json | 940 ++++++++++-------- .../request.message | 473 ++++++--- examples/NpgsqlDapperExample/QuerySql.cs | 18 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 18 +- examples/NpgsqlExample/QuerySql.cs | 18 +- examples/NpgsqlLegacyExample/QuerySql.cs | 18 +- examples/SqliteDapperExample/QuerySql.cs | 12 +- .../SqliteDapperLegacyExample/QuerySql.cs | 12 +- examples/SqliteExample/QuerySql.cs | 12 +- examples/SqliteLegacyExample/QuerySql.cs | 12 +- examples/config/mysql/types/query.sql | 82 +- examples/config/mysql/types/schema.sql | 27 +- 36 files changed, 3879 insertions(+), 2566 deletions(-) diff 
--git a/CodeGenerator/Generators/QueriesGen.cs b/CodeGenerator/Generators/QueriesGen.cs index b19841e6..88b13cd0 100644 --- a/CodeGenerator/Generators/QueriesGen.cs +++ b/CodeGenerator/Generators/QueriesGen.cs @@ -4,12 +4,13 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Text.RegularExpressions; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; namespace SqlcGenCsharp.Generators; -internal class QueriesGen(DbDriver dbDriver, string namespaceName) +internal partial class QueriesGen(DbDriver dbDriver, string namespaceName) { private static readonly string[] ResharperDisables = [ @@ -132,16 +133,22 @@ private IEnumerable GetMembersForSingleQuery(Query quer private MemberDeclarationSyntax? GetQueryTextConstant(Query query) { - var transformQueryText = dbDriver.TransformQueryText(query); - if (transformQueryText == string.Empty) + var transformedQueryText = dbDriver.TransformQueryText(query); + if (transformedQueryText == string.Empty) return null; + + var singleLineQueryText = LongWhitespaceRegex().Replace(transformedQueryText, " "); return ParseMemberDeclaration( $""" - private const string {ClassMember.Sql.Name(query.Name)} = "{transformQueryText}"; + private const string {ClassMember.Sql.Name(query.Name)} = "{singleLineQueryText}"; """)! 
.AppendNewLine(); } + + [GeneratedRegex(@"\s{2,}")] + private static partial Regex LongWhitespaceRegex(); + private MemberDeclarationSyntax AddMethodDeclaration(Query query) { var queryTextConstant = ClassMember.Sql.Name(query.Name); diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index b9c86511..5699cfba 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -224,7 +224,7 @@ public async Task TestMySqlBinaryTypes( byte[] cMediumblob, byte[] cLongblob) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlBinaryTypes(new QuerySql.InsertMysqlBinaryTypesArgs { CBit = cBit, CBinary = cBinary, @@ -235,7 +235,7 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CLongblob = cLongblob }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlBinaryTypesRow { CBit = cBit, CBinary = cBinary, @@ -246,10 +246,10 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlBinaryTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBinaryTypesRow y) { Assert.That(x.CBit, Is.EqualTo(y.CBit)); Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); @@ -553,7 +553,7 @@ public async Task TestBinaryCopyFrom( byte[] cLongblob) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlBinaryTypesBatchArgs { CBit = cBit, CBinary = cBinary, @@ -564,8 +564,8 @@ public async Task TestBinaryCopyFrom( CLongblob = cLongblob }) .ToList(); - await 
QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlBinaryTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlBinaryTypesCntRow { Cnt = batchSize, CBit = cBit, @@ -576,10 +576,10 @@ public async Task TestBinaryCopyFrom( CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlBinaryTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesCntRow x, QuerySql.GetMysqlBinaryTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBit, Is.EqualTo(y.CBit)); diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs index 278c8568..c2e8c6e3 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlBinaryTypes(); await QuerySql.TruncateExtendedBios(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index 9035f675..8c8dc7c8 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -490,8 +490,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlBinaryTypes(byte? 
cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlBinaryTypes(new QuerySql.InsertMysqlBinaryTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); + var expected = new QuerySql.GetMysqlBinaryTypesRow { CBit = cBit, CBinary = cBinary, @@ -501,9 +501,9 @@ public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbi CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlBinaryTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBinaryTypesRow y) { Assert.That(x.CBit, Is.EqualTo(y.CBit)); Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); @@ -784,9 +784,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestBinaryCopyFrom(int batchSize, byte? 
cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlBinaryTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); + await QuerySql.InsertMysqlBinaryTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlBinaryTypesCntRow { Cnt = batchSize, CBit = cBit, @@ -797,9 +797,9 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlBinaryTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesCntRow x, QuerySql.GetMysqlBinaryTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBit, Is.EqualTo(y.CBit)); diff --git a/end2end/EndToEndTests/MySqlConnectorTester.cs b/end2end/EndToEndTests/MySqlConnectorTester.cs index 3a9c01c2..6c112bc8 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlBinaryTypes(); await QuerySql.TruncateExtendedBios(); } } \ No newline at end of file diff --git 
a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 891c0b9b..c610480c 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -490,8 +490,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlBinaryTypes(new QuerySql.InsertMysqlBinaryTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); + var expected = new QuerySql.GetMysqlBinaryTypesRow { CBit = cBit, CBinary = cBinary, @@ -501,9 +501,9 @@ public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbi CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlBinaryTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBinaryTypesRow y) { Assert.That(x.CBit, Is.EqualTo(y.CBit)); Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); @@ -784,9 +784,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestBinaryCopyFrom(int batchSize, byte? 
cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlBinaryTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); + await QuerySql.InsertMysqlBinaryTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlBinaryTypesCntRow { Cnt = batchSize, CBit = cBit, @@ -797,9 +797,9 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlBinaryTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesCntRow x, QuerySql.GetMysqlBinaryTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBit, Is.EqualTo(y.CBit)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs index 7a369ce6..da6ef514 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlBinaryTypes(); await QuerySql.TruncateExtendedBios(); } } diff --git 
a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index ffc0f636..60e28e85 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -490,8 +490,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlBinaryTypes(new QuerySql.InsertMysqlBinaryTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); + var expected = new QuerySql.GetMysqlBinaryTypesRow { CBit = cBit, CBinary = cBinary, @@ -501,9 +501,9 @@ public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbi CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlBinaryTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBinaryTypesRow y) { Assert.That(x.CBit, Is.EqualTo(y.CBit)); Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); @@ -784,9 +784,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestBinaryCopyFrom(int batchSize, byte? 
cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlBinaryTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); + await QuerySql.InsertMysqlBinaryTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlBinaryTypesCntRow { Cnt = batchSize, CBit = cBit, @@ -797,9 +797,9 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlBinaryTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesCntRow x, QuerySql.GetMysqlBinaryTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBit, Is.EqualTo(y.CBit)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs index be0cf1b0..e39c25dc 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlBinaryTypes(); await QuerySql.TruncateExtendedBios(); } } diff --git 
a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index c49bcd1f..4c6c66c5 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -490,8 +490,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlBinaryTypes(new QuerySql.InsertMysqlBinaryTypesArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }); + var expected = new QuerySql.GetMysqlBinaryTypesRow { CBit = cBit, CBinary = cBinary, @@ -501,9 +501,9 @@ public async Task TestMySqlBinaryTypes(byte? cBit, byte[] cBinary, byte[] cVarbi CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlBinaryTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBinaryTypesRow y) { Assert.That(x.CBit, Is.EqualTo(y.CBit)); Assert.That(x.CBinary, Is.EqualTo(y.CBinary)); @@ -784,9 +784,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestBinaryCopyFrom(int batchSize, byte? 
cBit, byte[] cBinary, byte[] cVarbinary, byte[] cTinyblob, byte[] cBlob, byte[] cMediumblob, byte[] cLongblob) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlBinaryTypesBatchArgs { CBit = cBit, CBinary = cBinary, CVarbinary = cVarbinary, CTinyblob = cTinyblob, CBlob = cBlob, CMediumblob = cMediumblob, CLongblob = cLongblob }).ToList(); + await QuerySql.InsertMysqlBinaryTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlBinaryTypesCntRow { Cnt = batchSize, CBit = cBit, @@ -797,9 +797,9 @@ public async Task TestBinaryCopyFrom(int batchSize, byte? cBit, byte[] cBinary, CMediumblob = cMediumblob, CLongblob = cLongblob }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlBinaryTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesCntRow x, QuerySql.GetMysqlBinaryTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBit, Is.EqualTo(y.CBit)); diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index a5e4cf22..464f9d50 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -39,6 +39,9 @@ public class MysqlType public JsonElement? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } +}; +public class MysqlBinaryType +{ public byte? CBit { get; init; } public byte[]? 
CBinary { get; init; } public byte[]? CVarbinary { get; init; } diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index 16349bf0..ac0f766f 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -76,7 +76,7 @@ public class GetAuthorArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public required long Id { get; init; } @@ -394,7 +394,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public required Author? 
Author { get; init; } @@ -615,10 +615,9 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; public class InsertMysqlTypesArgs { - public byte? 
CBit { get; init; } public bool? CBool { get; init; } public bool? CBoolean { get; init; } public short? CTinyint { get; init; } @@ -650,17 +649,10 @@ public class InsertMysqlTypesArgs public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } - public byte[]? CBinary { get; init; } - public byte[]? CVarbinary { get; init; } - public byte[]? CTinyblob { get; init; } - public byte[]? CBlob { get; init; } - public byte[]? CMediumblob { get; init; } - public byte[]? CLongblob { get; init; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bit", args.CBit); queryParams.Add("c_bool", args.CBool); queryParams.Add("c_boolean", args.CBoolean); queryParams.Add("c_tinyint", args.CTinyint); @@ -692,12 +684,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_date", args.CDate); queryParams.Add("c_datetime", args.CDatetime); queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_binary", args.CBinary); - queryParams.Add("c_varbinary", args.CVarbinary); - queryParams.Add("c_tinyblob", args.CTinyblob); - queryParams.Add("c_blob", args.CBlob); - queryParams.Add("c_mediumblob", args.CMediumblob); - queryParams.Add("c_longblob", args.CLongblob); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) @@ -718,7 +704,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) public class InsertMysqlTypesBatchArgs { - public byte? CBit { get; init; } public bool? CBool { get; init; } public bool? CBoolean { get; init; } public short? CTinyint { get; init; } @@ -750,12 +735,6 @@ public class InsertMysqlTypesBatchArgs public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } - public byte[]? CBinary { get; init; } - public byte[]? CVarbinary { get; init; } - public byte[]? 
CTinyblob { get; init; } - public byte[]? CBlob { get; init; } - public byte[]? CMediumblob { get; init; } - public byte[]? CLongblob { get; init; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -780,10 +759,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -812,13 +787,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", 
"c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; init; } @@ -853,13 +828,6 @@ public class GetMysqlTypesRow public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } - public byte? CBit { get; init; } - public byte[]? CBinary { get; init; } - public byte[]? CVarbinary { get; init; } - public byte[]? CTinyblob { get; init; } - public byte[]? CBlob { get; init; } - public byte[]? CMediumblob { get; init; } - public byte[]? 
CLongblob { get; init; } }; public async Task GetMysqlTypes() { @@ -880,13 +848,12 @@ public class GetMysqlTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; public class GetMysqlTypesCntRow { public 
required long Cnt { get; init; } public bool? CBool { get; init; } public bool? CBoolean { get; init; } - public byte? CBit { get; init; } public short? CTinyint { get; init; } public short? CSmallint { get; init; } public int? CMediumint { get; init; } @@ -916,12 +883,6 @@ public class GetMysqlTypesCntRow public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } - public byte[]? CBinary { get; init; } - public byte[]? CVarbinary { get; init; } - public byte[]? CTinyblob { get; init; } - public byte[]? CBlob { get; init; } - public byte[]? CMediumblob { get; init; } - public byte[]? CLongblob { get; init; } }; public async Task GetMysqlTypesCnt() { @@ -988,4 +949,183 @@ public async Task TruncateMysqlTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } + + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + public class InsertMysqlBinaryTypesArgs + { + public byte? CBit { get; init; } + public byte[]? CBinary { get; init; } + public byte[]? CVarbinary { get; init; } + public byte[]? CTinyblob { get; init; } + public byte[]? CBlob { get; init; } + public byte[]? CMediumblob { get; init; } + public byte[]? 
CLongblob { get; init; } + }; + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_binary", args.CBinary); + queryParams.Add("c_varbinary", args.CVarbinary); + queryParams.Add("c_tinyblob", args.CTinyblob); + queryParams.Add("c_blob", args.CBlob); + queryParams.Add("c_mediumblob", args.CMediumblob); + queryParams.Add("c_longblob", args.CLongblob); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlBinaryTypesBatchArgs + { + public byte? CBit { get; init; } + public byte[]? CBinary { get; init; } + public byte[]? CVarbinary { get; init; } + public byte[]? CTinyblob { get; init; } + public byte[]? CBlob { get; init; } + public byte[]? CMediumblob { get; init; } + public byte[]? 
CLongblob { get; init; } + }; + public async Task InsertMysqlBinaryTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public class GetMysqlBinaryTypesRow + { + public byte? CBit { get; init; } + public byte[]? CBinary { get; init; } + public byte[]? 
CVarbinary { get; init; } + public byte[]? CTinyblob { get; init; } + public byte[]? CBlob { get; init; } + public byte[]? CMediumblob { get; init; } + public byte[]? CLongblob { get; init; } + }; + public async Task GetMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public class GetMysqlBinaryTypesCntRow + { + public required long Cnt { get; init; } + public byte? CBit { get; init; } + public byte[]? CBinary { get; init; } + public byte[]? CVarbinary { get; init; } + public byte[]? CTinyblob { get; init; } + public byte[]? CBlob { get; init; } + public byte[]? CMediumblob { get; init; } + public byte[]? 
CLongblob { get; init; } + }; + public async Task GetMysqlBinaryTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); + } + + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 64fdd9a1..0054e5a0 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -349,12 +349,19 @@ "type": { "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_binary_types" + }, + "columns": [ { "name": "c_bit", "length": 8, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "bit" @@ -364,7 +371,7 @@ "name": "c_binary", "length": 3, "table": { - "name": "mysql_types" + "name": 
"mysql_binary_types" }, "type": { "name": "binary" @@ -374,7 +381,7 @@ "name": "c_varbinary", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "varbinary" @@ -384,7 +391,7 @@ "name": "c_tinyblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "tinyblob" @@ -394,7 +401,7 @@ "name": "c_blob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "blob" @@ -404,7 +411,7 @@ "name": "c_mediumblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "mediumblob" @@ -414,7 +421,7 @@ "name": "c_longblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "longblob" @@ -1459,27 +1466,12 @@ "filename": "query.sql" }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -1494,7 +1486,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -1509,7 +1501,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -1524,7 +1516,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -1539,7 +1531,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -1554,7 +1546,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -1569,7 +1561,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -1584,7 +1576,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -1599,7 +1591,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_decimal", "length": 10, @@ -1614,7 +1606,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_dec", "length": 10, @@ -1629,7 +1621,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_numeric", "length": 10, @@ -1644,7 +1636,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_fixed", "length": 10, @@ -1659,7 +1651,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_float", "length": -1, @@ -1674,7 +1666,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -1689,7 +1681,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -1704,7 +1696,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -1719,7 +1711,7 @@ } }, { - "number": 18, + "number": 17, "column": { 
"name": "c_nchar", "length": -1, @@ -1734,7 +1726,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -1749,7 +1741,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -1764,7 +1756,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -1779,7 +1771,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -1794,7 +1786,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -1809,7 +1801,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -1824,7 +1816,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -1839,7 +1831,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1854,7 +1846,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1869,7 +1861,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -1884,7 +1876,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1899,7 +1891,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1914,7 +1906,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1929,7 +1921,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1942,125 +1934,23 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" 
- }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "mysql_types" } }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n 
c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -2075,7 +1965,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -2090,7 +1980,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -2105,7 +1995,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -2120,7 +2010,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -2135,7 +2025,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -2150,7 +2040,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -2165,7 +2055,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -2180,7 +2070,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_float", "length": -1, @@ -2195,7 +2085,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_numeric", "length": 10, @@ -2210,7 +2100,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_decimal", "length": 10, @@ -2225,7 +2115,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_dec", "length": 10, @@ -2240,7 +2130,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_fixed", "length": 10, @@ -2255,7 +2145,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -2270,7 +2160,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -2285,7 +2175,7 @@ } }, 
{ - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -2300,7 +2190,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -2315,7 +2205,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -2330,7 +2220,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -2345,7 +2235,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -2360,7 +2250,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -2375,7 +2265,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -2390,7 +2280,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -2405,7 +2295,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -2420,7 +2310,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -2435,7 +2325,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -2450,7 +2340,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -2465,7 +2355,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -2480,7 +2370,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -2495,7 +2385,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -2510,7 +2400,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -2523,96 +2413,6 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - 
"column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], "filename": "query.sql", @@ -2621,7 +2421,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": 
":one", "columns": [ @@ -2976,89 +2776,12 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, 
c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -3093,17 +2816,6 @@ }, "originalName": "c_boolean" }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, { "name": "c_tinyint", "length": 3, @@ -3422,78 +3134,12 @@ "name": "timestamp" }, "originalName": "c_timestamp" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - 
"originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", + "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ @@ -3532,6 +3178,426 @@ "name": "TruncateMysqlTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + 
"table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + 
"type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + 
"text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index a46f273ae05a70e10a8a8b154e080414a4d64784..9ef230d7bd3bceea1e09bcfc76ea6ac946482519 
100644 GIT binary patch literal 22751 zcmeHP&2!tv6)*Kkt*qK!+lhmGA@G-kC=#XEj?Kn)M2T{h=);ldByA_vP$VoPp-34Z z9ZMe1^w>^2?M!>kspqECOeeXthxXD-r_*D5$gO`sPd%iU^u7JU0t7%xb`obQjmKO7 z?>+3kef##e?=6=6yPssHY5&%af2U@gb?%k7wrh?*S!vX7xto*h_GEKs8*cfP+s;;b z0_jHM>B_CNzrt<6;jr1(uM-}G9}xV7oj9k1MUX8moi@t(X}FK;>i zcDdp#L&3^hDQ~#$)U9!=ao6!Yx9a#-{s;H1N~6}=ssrD3r@4e0op04EO}A0^^IPS6 z>u$XXEWBdt*fJ}B->Ns7%dJ|?%HJy2e5e?|Zz1i22jd-O-Yt6-XlU1R{if&EH@gzu zbU_(SFo+c4T$!TIbD+->Fh!)>`O5lxextt5-@Pt%mLfK+3svWLK|N(}(`n9o&_5qE zy!S!Zb*Fi0A_u}+zQb*R_vQopIH41hRo8D$E;cIV+G-0d#iO4brvuxqjhb7r{z_%h z_}efUF@~AFv##;!X);h*-)J;yxy)+#(Z-N5*mUbV&`0zf8RAqt?l)Dt8NI&nACMQ4oxP6N^hE9a2-|RpS^^#OvK6>l)E{?`!%_NU#?df6P#Hh&B}&eQTQ7S%!mXc0 zIU8;>mtBn|*Z`LYkHIN1Cx)EhuH#9W3quM}HJCZr1Ai5aoOlMXc11)l<0wLd*r+u& zbd|;P3hO~)I>qLuuGqjB7|WHw;JSOCvOC7kDvMUq9kVbgBsPAOAuw13IHYGuk6S&N z(c2q5-3TUyv!oBvSR$n~@>2x5tC1j15E&n32+SWanZJ+odRqnLGwt=K<6f`mj5j zf;>6^T!8WEK(Xoq=BA|NmFs@33}x%H49$Yuaq3m4YWzJzj%b&CC<(^Yun>k_3qG}t z1fi95t7mu@bRwH1SrE<|l^cnXc-V-v{DP_lHYs;9hKCw$O*eklLoTfq7mKqc)2)u1 z*n)8r-fYgVt}L0nUB3DDcZ;h<_RzdyUNaXLmKI9p6di&U0au;Tmv7V^%Wk*AxM8ph zwrY*X*`h`*tY()D7_gnr17a6R>|8bjUDDMsbPdE%+IeCOXf8?ya@1|E594t4ZZqV9&_D$cwohk! zs7VH3n8A#bM5XKOCq3l+!tz>iwPY?VmsXU~jD`$q%zSHR@n&(&9KAMf;=eI^Xx4K8 zZV3zO&pY>=ihU94INfeWN7b4}`rN>+p*h)?pxWo-x;=(7WLoz5Y%k5VnO6!Mnwh46 zLSVGUKQrW|SY4=r71qNTJ+3QZ)oHf8`a;zhX3D6Q{U&Q#X5UTgd1OrkPjNAJXy_wD zcnY@~@)IMu zCKg>kFtN}>b56~PdW45^=$FZ{nD^2n;zhMu;HJ_2=vDqjhU9MEn45t*mw613pnJ#3 z5$YgfXp^KDsv4Sh!WO2G=j{yoeZ9l}+e6-c zIQ5t_%X1v&=(}V6$kA%UW@hxm_4URKWk}<^2P85)iZphEB~_$nbK!I zwk$D6QR285Kd_vL&VGc?eg#;=gy%dC17c9zVFd*E$|vk{>;g38YU28(jagaKZm{Ih zT2+v17~|JHm?_c7l_1Dnor;OmKXSR>si7z9VU>P(X?fJ z+C%;lQPPyIrKyRKnx?p(L|vzK%IPrWG*X7uBDkiwe#Dhj&$Om0{!(z+>OEKq;l8C5 zc(?g#K;c&DF&OG|t!)UXYwlcN`7V^!nM7CI$GznD5yk1+2rJ{^tShX=?u|>%cg(0! 
zgE$G&?#DoUA%n-R!5ij7>w9eu?u@X+eZtkx_b#tsCpxlruafzc*TI>BPafD)y`pPxX(CP zm%S8P!8`?aKJR*dQ|pn!P6=uzRx|tW5^FErVeP*wtVhrdU=3(}s(ZT%O$FAV^`bFP zo++)~T%JXEU79H@7R^v+(BmZ!(#+s{HI3t}6-DX)Izj%}NA!WHh0RSeN=*3U$;@%B z42ey7DbknnQl=l}waP5Y8xSbbikLi!FB0A|;2#TbOiX1xyh# z8kR2$NP9UsS7;|ByWn4qr8+^8hIV$U*sLksK~O+ov3$Xazs9iOMJ^` znEsGuh#<)#X6$4cS4espb7k^c<;}>LNg^R5?j0ZP|6}yXLfv;fzO?~Iw6rn^DdEC5 z$d{i`&{I#TYkBisL2F+HFPjhN&^t2+DR@$K(q2k0v?Df$FKJXOETQ~LI7c9336$l9k12r~Wl zTsBKzi%;e!$Ou*mY6}2#(Gc$UmMwZgW)XaVdNzqLK(57SDWW#lW@8e1pJ zP?wXuF44UP$#p?Wv>yDmf0%uWe!rXj2OZ=k>|g-@X@LKPw~0Lp31U>L9-+6t&dITo z$Ob0<@^XFy67(8M@reGXIXCRZK7pzw&)-qKH2V?Q#dsoulp1O%~gLMo&AE4b(JnC^pyD9MY zlcVJG9+6!Yv04C)klw6eoEC~Au!Th#q{4C#n}rJ&dQ~5E;A0t&72jdtEirkBBK~N& zwJVg$*C0#)tRC=}g)dRISWU=#?Bf8uV2Bo9r3#^NEj*E-S}4k;GBRo*T6}`xS@@O9 zO)TR8kiXGSk?=l?#=uOR|GxElSXr?uj7|DzXNQ_`wlQGfD2y-ZEX}yvVv5aSe z#)2hdTEV&$AEk`&#Y88q3$anU2xD7xj`v7_uOjrZflS!gs)Crw^qhpt5Aa_yJt#qt z!G>)ae{BDBN2`$};!10eE=JxJ6IHYpJ4F-|WyC}hp{*iZq0eLy36a1C=Rr4;&Vv*( zm|>0Kdsq;o?Xxiuj}MR(f59rW5Jvk~O9`oFIQ5_|ktORLdobY6RQ-T_i zBNp?Te5$~WcpLy6aRWh4P>e}Fi-$ZcCoUw!5tkAJ1(#AJ2SD;Z&qs%@w4AiKkHb*1 ze^n`I@oF>*`D!e;;U7WAFpwxeR7RTZbk_hBxxTt`^9FneB7}c!{R{w4_AwM?|MDWD zz;1x8Xv6rJt#=Q+{~qd2r5_O(n9AA?P6;CQgp)4q{McZ)Q9auLHn zmEiy)i{k;>P6rUU`fG>CdxWrTar7m^>TfheMu(FQAQ;kvQG~w-@G$Y{j01QWe;~_O z3lBLejwZGv@bXct`@ZVMoQ`{c-b3zduy<+ZolzFGV8B-#3dE5aeah%WG0ee9 zaDmDKW{ODN;ak=<+(nPd30oaSBxdaA$)I?Bbk>Hc13$w(MTU_;R3L%q3>iTpRe^Xi zT_FWxM0T_4)unPTs}^Ky-^u!`0rJ~S%uZJQpa*S3tE(0QoIvHzp>2^nxX8dm3lvG{ zg0C?`w{W?KH#+RWgwGeXv~qzRND^EUgG(m>w&UVp zbl~=~jf2r4M~60}6yit2Kl;d>AWsHU04(nLP{5LMR(K)Z6e9tbfWNw{4+f>47z&Lm zxjx2k;n>tw&|Eyv60rk}Iv+eFyt;L9FXs;hp4_??_2RMW*+}%>Rx!zQ#|uiZN%;Im zI9=Tn#*2t}Y3q$-4;?Qc#WVBfNVDl&mjrLD*_-j<0Qsdz!7@~q6v}La9U^WN*q*qd z>`z?dR3M{unjMO#vMoB+(`~UW)b?f9L)tzb&=9r#?juA8!y}V$XxKMwc&v~YVgbHx zqx575=j+b04eeWlcc&(%Uzy0+zw5iN4qTn{TsV65Eyu(265$k3IR7*LNY|7DL-CW) zJ)QO%Ibk$7{*|9Q8mf&v<#hii!{SzFXc^=&pmE`2Vjt>cSRq%Os=6Q1YPr{bgsTuf i2YJ^LCQ>!=cvft<;@zJN+o{fQGHm#iUv`4*gZ}|YJ7wqq delta 1931 
zcmeH{O>7%g5Xber&U*dqY~trSUOR2FUVl4Q;+WV8NtAUQFUU<}Z*8XtiezjPBuh!0 z4}%m0NFWZVq7iiNpsEK>MN~!zXsfnTIdBN57sLUn7Y>zjiPTdsg}3XvPI`cFKwP-& z%$t2PJNxGM{`aq;FTX%vJv0dWcEWDFSzWEyr1we3b?@>m3$xc_A;Y+*R<72YF%CAT z>^ABVKjyU1dSz=T#sJDV9GXi;u%%XhtFrav4TeqD4ui{P1Bt`CW&v(0eXwRS@zvEv zl|(R0YfulaDMPFtO2WR`t?#-OH@8D^SXyc)=ASdQ%J`zg|FsKWH)eB1rBD*Hxl&$S z+u7b+EjM>IR<=bP(tC}^BfQ7{b{C;q`;#L=!tv&0b@TY(L;cS>%L+po|2Fg%*(u-U-P@H+cmE3OnJKkb(x`WH^IP zLM1$pPQj-WG8%$=^f?T+L>h_kMkL)Inc+NSvdaP7liZ;Cdf?GwAIWeMNgaU!JTW&2 z%Pux@n)A|Q(QwdypW*=7LO&mw<9uZLDH3JVE);EdQ5Q@U@>PszYa>itl0^Lhabx)wW_jX$d zrLivdZTX|n^EPSf2=TFw{5jFwVfCZ9+syH&q*L*ur1zrCVeO-k+iBJz>C^cR6|ITK z0={V~Drc01lDLpx&Xq#rVR61F*4CREvDHRnwPRN*>$+QS!cU2`5sM CSet { get; set; } + }; + public class MysqlBinaryType + { public byte? CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 180a31a2..c0eb8e62 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -45,10 +45,9 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, 
@c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; public class InsertMysqlTypesArgs { - public byte? CBit { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } public short? CTinyint { get; set; } @@ -80,17 +79,10 @@ public class InsertMysqlTypesArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bit", args.CBit); queryParams.Add("c_bool", args.CBool); queryParams.Add("c_boolean", args.CBoolean); queryParams.Add("c_tinyint", args.CTinyint); @@ -122,12 +114,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_date", args.CDate); queryParams.Add("c_datetime", args.CDatetime); queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_binary", args.CBinary); - queryParams.Add("c_varbinary", args.CVarbinary); - queryParams.Add("c_tinyblob", args.CTinyblob); - queryParams.Add("c_blob", args.CBlob); - queryParams.Add("c_mediumblob", args.CMediumblob); - queryParams.Add("c_longblob", args.CLongblob); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) @@ -148,7 +134,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) public class InsertMysqlTypesBatchArgs { - public byte? CBit { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } public short? CTinyint { get; set; } @@ -180,12 +165,6 @@ public class InsertMysqlTypesBatchArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -210,9 +189,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -240,13 +216,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", 
"c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; set; } @@ -281,13 +257,6 @@ public class GetMysqlTypesRow public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public byte? 
CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task GetMysqlTypes() { @@ -308,13 +277,12 @@ public async Task GetMysqlTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, 
c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; public class GetMysqlTypesCntRow { public long Cnt { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } - public byte? CBit { get; set; } public short? CTinyint { get; set; } public short? CSmallint { get; set; } public int? CMediumint { get; set; } @@ -344,12 +312,6 @@ public class GetMysqlTypesCntRow public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task GetMysqlTypesCnt() { @@ -417,6 +379,184 @@ public async Task TruncateMysqlTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + public class InsertMysqlBinaryTypesArgs + { + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_binary", args.CBinary); + queryParams.Add("c_varbinary", args.CVarbinary); + queryParams.Add("c_tinyblob", args.CTinyblob); + queryParams.Add("c_blob", args.CBlob); + queryParams.Add("c_mediumblob", args.CMediumblob); + queryParams.Add("c_longblob", args.CLongblob); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlBinaryTypesBatchArgs + { + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlBinaryTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public class 
GetMysqlBinaryTypesRow + { + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public class GetMysqlBinaryTypesCntRow + { + public long Cnt { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlBinaryTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); + } + + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -449,7 +589,7 @@ public async Task GetAuthor(GetAuthorArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET 
@offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public long Id { get; set; } @@ -767,7 +907,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index 8f368d58..f2f022c1 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -349,12 +349,19 @@ "type": { "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_binary_types" + }, + "columns": [ { "name": "c_bit", "length": 8, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "bit" @@ -364,7 +371,7 @@ "name": "c_binary", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "binary" @@ -374,7 +381,7 @@ "name": "c_varbinary", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "varbinary" @@ -384,7 +391,7 @@ "name": "c_tinyblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "tinyblob" @@ -394,7 +401,7 @@ "name": "c_blob", "length": -1, "table": { 
- "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "blob" @@ -404,7 +411,7 @@ "name": "c_mediumblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "mediumblob" @@ -414,7 +421,7 @@ "name": "c_longblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "longblob" @@ -608,27 +615,12 @@ }, "queries": [ { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -643,7 +635,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -658,7 
+650,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -673,7 +665,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -688,7 +680,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -703,7 +695,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -718,7 +710,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -733,7 +725,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -748,7 +740,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_decimal", "length": 10, @@ -763,7 +755,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_dec", "length": 10, @@ -778,7 +770,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_numeric", "length": 10, @@ -793,7 +785,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_fixed", "length": 10, @@ -808,7 +800,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_float", "length": -1, @@ -823,7 +815,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -838,7 +830,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -853,7 +845,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -868,7 +860,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -883,7 +875,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -898,7 +890,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -913,7 +905,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -928,7 +920,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ 
-943,7 +935,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -958,7 +950,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -973,7 +965,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -988,7 +980,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1003,7 +995,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1018,7 +1010,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -1033,7 +1025,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1048,7 +1040,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1063,7 +1055,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1078,7 +1070,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1091,125 +1083,23 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - 
"column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "mysql_types" } }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -1224,7 
+1114,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -1239,7 +1129,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -1254,7 +1144,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -1269,7 +1159,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -1284,7 +1174,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -1299,7 +1189,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -1314,7 +1204,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -1329,7 +1219,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_float", "length": -1, @@ -1344,7 +1234,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_numeric", "length": 10, @@ -1359,7 +1249,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_decimal", "length": 10, @@ -1374,7 +1264,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_dec", "length": 10, @@ -1389,7 +1279,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_fixed", "length": 10, @@ -1404,7 +1294,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -1419,7 +1309,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -1434,7 +1324,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -1449,7 +1339,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -1464,7 +1354,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -1479,7 +1369,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -1494,7 +1384,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": 
"c_tinytext", "length": -1, @@ -1509,7 +1399,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -1524,7 +1414,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -1539,7 +1429,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -1554,7 +1444,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -1569,7 +1459,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1584,7 +1474,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1599,7 +1489,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -1614,7 +1504,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1629,7 +1519,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1644,7 +1534,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1659,7 +1549,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1672,96 +1562,6 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - 
"table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], "filename": "query.sql", @@ -1770,7 +1570,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -2125,89 +1925,12 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - 
}, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n 
c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -2242,17 +1965,6 @@ }, "originalName": "c_boolean" }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, { "name": "c_tinyint", "length": 3, @@ -2571,78 +2283,12 @@ "name": "timestamp" }, "originalName": "c_timestamp" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS 
max_timestamp\nFROM mysql_types", + "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ @@ -2682,6 +2328,426 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + 
"type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } 
+ ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": 
"mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", + "cmd": ":exec", + "filename": "query.sql" + }, { "text": "SELECT id, name, bio FROM authors WHERE name = ? 
LIMIT 1", "name": "GetAuthor", diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index d2a8fde3..4bef43f4 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.message +++ b/examples/MySqlConnectorDapperLegacyExample/request.message @@ -2,7 +2,7 @@ Г 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╘ *examples/MySqlConnectorDapperLegacyExamplecsharpЗ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorDapperLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunnerыpublic"БpublicО +./dist/LocalRunner╡public"╦public═  mysql_types$ c_bool0R  mysql_typesb tinyint' c_boolean0R  mysql_typesb tinyint' @@ -49,19 +49,20 @@ c_longtext0 c_json0         R  mysql_typesbjson: c_json_string_override0         R  mysql_typesbjson/ c_enum0R  mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_set -c_bit0R  mysql_typesbbit% -c_binary0R  mysql_typesbbinary+ +c_set0R  mysql_typesbmysql_types_c_setИ +mysql_binary_types& +c_bit0Rmysql_binary_typesbbit, +c_binary0Rmysql_binary_typesbbinary2 c_varbinary0 -R  mysql_typesb  varbinary2 +Rmysql_binary_typesb  varbinary9 -c_tinyblob0         R  mysql_typesb -tinyblob* -c_blob0         R  mysql_typesbblob6 - c_mediumblob0         R  mysql_typesb  -mediumblob2 +c_tinyblob0         Rmysql_binary_typesb +tinyblob1 +c_blob0         Rmysql_binary_typesbblob= + c_mediumblob0         Rmysql_binary_typesb  
+mediumblob9 -c_longblob0         R  mysql_typesb +c_longblob0         Rmysql_binary_typesb longblobА authors& id0         R authorsbbigint& @@ -81,150 +82,165 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_type┤ -╗INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*2. -c_bit0Rpublic mysql_typesbbitzc_bit*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF + author_type0Rextendedbiosbbios_author_typeу +╝ +INSERT INTO mysql_types +( + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 +c_bool0Rpublic mysql_typesb tinyintzc_bool*>: + c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: + c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF c_smallint0         Rpublic mysql_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz 
c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D @ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> -: +c_smallint*MI + c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 +c_int0         Rpublic mysql_typesbintzc_int*C? + c_integer0         Rpublic mysql_typesbintz c_integer*D@ +c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> : c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublic mysql_typesb decimalz c_decimal*6 +2 c_dec0 -Rpublic mysql_typesb decimalzc_dec*> : +Rpublic mysql_typesb decimalzc_dec*> : c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*: 6 +Rpublic mysql_typesb decimalz c_numeric*: 6 c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*A= -c_float0         Rpublic mysql_typesbfloatzc_float*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF +Rpublic mysql_typesb decimalzc_fixed*A = +c_float0         Rpublic mysql_typesbfloatzc_float*D@ +c_double0         Rpublic mysql_typesbdoublezc_double*XT +c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: +c_char0         Rpublic mysql_typesbcharzc_char*@< +c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL +c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: + c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF c_tinytext0         Rpublic mysql_typesb tinytextz -c_tinytext*PL +c_tinytext*PL c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +mediumtextz c_mediumtext*>: +c_text0         Rpublic mysql_typesbtextzc_text*JF c_longtext0         Rpublic mysql_typesb 
longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? -c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= +c_longtext*>: +c_json0         Rpublic mysql_typesbjsonzc_json*^Z +c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? +c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: +c_year0         Rpublic mysql_typesbyearzc_year*>: +c_date0         Rpublic mysql_typesbdatezc_date*A= c_datetime0Rpublic mysql_typesb datetimez -c_datetime*D @ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp*;!7 -c_binary0Rpublic mysql_typesbbinaryzc_binary*D"@ - c_varbinary0 -Rpublic mysql_typesb  varbinaryz c_varbinary*J#F - -c_tinyblob0         Rpublic mysql_typesb -tinyblobz -c_tinyblob*>$: -c_blob0         Rpublic mysql_typesbblobzc_blob*P%L - c_mediumblob0         Rpublic mysql_typesb  -mediumblobz c_mediumblob*J&F - -c_longblob0         Rpublic mysql_typesb -longblobz -c_longblob: query.sqlB  mysql_types╝ -║INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*2. 
-c_bit0Rpublic mysql_typesbbitzc_bit*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF +c_datetime*D@ + c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp2 Basic types : query.sqlB  mysql_types▄ +╗INSERT INTO mysql_types +( + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 +c_bool0Rpublic mysql_typesb tinyintzc_bool*>: + c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: + c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF c_smallint0         Rpublic mysql_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D @ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A -= -c_float0         Rpublic mysql_typesbfloatzc_float*> : +c_smallint*MI + c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 +c_int0         Rpublic mysql_typesbintzc_int*C? 
+ c_integer0         Rpublic mysql_typesbintz c_integer*D@ +c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A = +c_float0         Rpublic mysql_typesbfloatzc_float*> +: c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*> : +Rpublic mysql_typesb decimalz c_numeric*> : c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublic mysql_typesb decimalz c_decimal*6 2 c_dec0 -Rpublic mysql_typesb decimalzc_dec*:6 +Rpublic mysql_typesb decimalzc_dec*: 6 c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF +Rpublic mysql_typesb decimalzc_fixed*D@ +c_double0         Rpublic mysql_typesbdoublezc_double*XT +c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: +c_char0         Rpublic mysql_typesbcharzc_char*@< +c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL +c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: + c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF c_tinytext0         Rpublic mysql_typesb tinytextz -c_tinytext*PL +c_tinytext*PL c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +mediumtextz c_mediumtext*>: +c_text0         Rpublic mysql_typesbtextzc_text*JF c_longtext0         Rpublic mysql_typesb longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
-c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= +c_longtext*>: +c_json0         Rpublic mysql_typesbjsonzc_json*^Z +c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? +c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: +c_year0         Rpublic mysql_typesbyearzc_year*>: +c_date0         Rpublic mysql_typesbdatezc_date*A= c_datetime0Rpublic mysql_typesb datetimez -c_datetime*D @ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp*;!7 -c_binary0Rpublic mysql_typesbbinaryzc_binary*D"@ - c_varbinary0 -Rpublic mysql_typesb  varbinaryz c_varbinary*J#F - -c_tinyblob0         Rpublic mysql_typesb -tinyblobz -c_tinyblob*>$: -c_blob0         Rpublic mysql_typesbblobzc_blob*P%L - c_mediumblob0         Rpublic mysql_typesb  -mediumblobz c_mediumblob*J&F - -c_longblob0         Rpublic mysql_typesb -longblobz -c_longblob: query.sqlB  mysql_typesО -┼SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1 GetMysqlTypes:one", +c_datetime*D@ + c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp: query.sqlB  mysql_types╡ +∙SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, 
c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", c_bool0R  mysql_typesb tinyintzc_bool"2 c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> @@ -274,38 +290,71 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set"& -c_bit0R  mysql_typesbbitzc_bit"/ -c_binary0R  mysql_typesbbinaryzc_binary"8 - c_varbinary0 -R  mysql_typesb  varbinaryz c_varbinary"> - -c_tinyblob0         R  mysql_typesb -tinyblobz -c_tinyblob"2 -c_blob0         R  mysql_typesbblobzc_blob"D - c_mediumblob0         R  mysql_typesb  -mediumblobz c_mediumblob"> - -c_longblob0         R  mysql_typesb -longblobz -c_longblob: query.sql■ -╜SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlї +┴SELECT + COUNT(*) AS cnt, + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp FROM mysql_types -GROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - 
c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +GROUP BY + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp LIMIT 1GetMysqlTypesCnt:one" cnt0         @bbigint", c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"& -c_bit0R  mysql_typesbbitzc_bit"2 + c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> c_smallint0         R  mysql_typesb @@ -351,27 +400,121 @@ c_longtext"2 c_datetime0R  mysql_typesb datetimez c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp"/ -c_binary0R  mysql_typesbbinaryzc_binary"8 + c_timestamp0R  mysql_typesb  timestampz c_timestamp: query.sqlО +{SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_typesGetMysqlFunctions:one" +max_int0         @bany"# + max_varchar0         @bany"% + max_timestamp0         @bany: query.sqlB +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlУ +о +INSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?)InsertMysqlBinaryTypes:exec*95 +c_bit0Rpublicmysql_binary_typesbbitzc_bit*B> +c_binary0Rpublicmysql_binary_typesbbinaryzc_binary*KG c_varbinary0 -R  mysql_typesb  varbinaryz c_varbinary"> +Rpublicmysql_binary_typesb  varbinaryz c_varbinary*QM -c_tinyblob0         R  mysql_typesb +c_tinyblob0         
Rpublicmysql_binary_typesb tinyblobz -c_tinyblob"2 -c_blob0         R  mysql_typesbblobzc_blob"D - c_mediumblob0         R  mysql_typesb  -mediumblobz c_mediumblob"> +c_tinyblob*EA +c_blob0         Rpublicmysql_binary_typesbblobzc_blob*WS + c_mediumblob0         Rpublicmysql_binary_typesb  +mediumblobz c_mediumblob*QM -c_longblob0         R  mysql_typesb +c_longblob0         Rpublicmysql_binary_typesb longblobz -c_longblob: query.sqlВ -oSELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_typesGetMysqlFunctions:one" -max_int0         @bany"# - max_varchar0         @bany"% - max_timestamp0         @bany: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlМ +c_longblob2 Binary types : query.sqlBmysql_binary_typesЛ +нINSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?)InsertMysqlBinaryTypesBatch :copyfrom*95 +c_bit0Rpublicmysql_binary_typesbbitzc_bit*B> +c_binary0Rpublicmysql_binary_typesbbinaryzc_binary*KG + c_varbinary0 +Rpublicmysql_binary_typesb  varbinaryz c_varbinary*QM + +c_tinyblob0         Rpublicmysql_binary_typesb +tinyblobz +c_tinyblob*EA +c_blob0         Rpublicmysql_binary_typesbblobzc_blob*WS + c_mediumblob0         Rpublicmysql_binary_typesb  +mediumblobz c_mediumblob*QM + +c_longblob0         Rpublicmysql_binary_typesb +longblobz +c_longblob: query.sqlBmysql_binary_types╫ +qSELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1GetMysqlBinaryTypes:one"- +c_bit0Rmysql_binary_typesbbitzc_bit"6 +c_binary0Rmysql_binary_typesbbinaryzc_binary"? 
+ c_varbinary0 +Rmysql_binary_typesb  varbinaryz c_varbinary"E + +c_tinyblob0         Rmysql_binary_typesb +tinyblobz +c_tinyblob"9 +c_blob0         Rmysql_binary_typesbblobzc_blob"K + c_mediumblob0         Rmysql_binary_typesb  +mediumblobz c_mediumblob"E + +c_longblob0         Rmysql_binary_typesb +longblobz +c_longblob: query.sqlЬ +ТSELECT + COUNT(*) AS cnt, + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +FROM mysql_binary_types +GROUP BY + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +LIMIT 1GetMysqlBinaryTypesCnt:one" +cnt0         @bbigint"- +c_bit0Rmysql_binary_typesbbitzc_bit"6 +c_binary0Rmysql_binary_typesbbinaryzc_binary"? + c_varbinary0 +Rmysql_binary_typesb  varbinaryz c_varbinary"E + +c_tinyblob0         Rmysql_binary_typesb +tinyblobz +c_tinyblob"9 +c_blob0         Rmysql_binary_typesbblobzc_blob"K + c_mediumblob0         Rmysql_binary_typesb  +mediumblobz c_mediumblob"E + +c_longblob0         Rmysql_binary_typesb +longblobz +c_longblob: query.sqlO +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlМ 8SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1 GetAuthor:one"* id0         R authorsbbigintzid", name0         R authorsbtextzname"( diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index f1f774be..d8b3d712 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -5,7 +5,8 @@ using System.Text.Json; namespace MySqlConnectorExampleGen; -public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? 
CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); +public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); +public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? 
AuthorType); diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index 8c4f615c..4deb21ec 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -99,7 +99,7 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public readonly record struct ListAuthorsRow(long Id, string Name, string? Bio); public readonly record struct ListAuthorsArgs(int Limit, int Offset); public async Task> ListAuthors(ListAuthorsArgs args) @@ -537,7 +537,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? 
Book); public async Task> ListAllAuthorsBooks() { @@ -783,8 +783,8 @@ public async Task TruncateExtendedBios() } } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; - public readonly record struct InsertMysqlTypesArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; + public readonly record struct InsertMysqlTypesArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { if (this.Transaction == null) @@ -794,7 +794,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) await connection.OpenAsync(); using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) { - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); @@ -826,12 +825,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -848,7 +841,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { command.CommandText = InsertMysqlTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); @@ -880,17 +872,11 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public readonly record struct InsertMysqlTypesBatchArgs(byte? CBit, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public readonly record struct InsertMysqlTypesBatchArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? 
CTimestamp); public async Task InsertMysqlTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; @@ -914,10 +900,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -946,14 +928,14 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", 
"c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; - public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; + public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? 
CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); public async Task GetMysqlTypes() { if (this.Transaction == null) @@ -1000,14 +982,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() }; } } @@ -1063,14 +1038,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? 
null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() }; } } @@ -1079,8 +1047,8 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; - public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, byte? CBit, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? 
CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; + public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); public async Task GetMysqlTypesCnt() { if (this.Transaction == null) @@ -1099,42 +1067,35 @@ public async Task InsertMysqlTypesBatch(List args) Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? null : reader.GetBoolean(1), CBoolean = reader.IsDBNull(2) ? 
null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? 
null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CTinyint = reader.IsDBNull(3) ? null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? null : reader.GetDouble(15), + CChar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), + CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), + CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), + CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), + CText = reader.IsDBNull(22) ? null : reader.GetString(22), + CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), + CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), + CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), + CEnum = reader.IsDBNull(26) ? 
null : reader.GetString(26).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(28) ? null : reader.GetInt16(28), + CDate = reader.IsDBNull(29) ? null : reader.GetDateTime(29), + CDatetime = reader.IsDBNull(30) ? null : reader.GetDateTime(30), + CTimestamp = reader.IsDBNull(31) ? null : reader.GetDateTime(31) }; } } @@ -1162,42 +1123,35 @@ public async Task InsertMysqlTypesBatch(List args) Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? null : reader.GetBoolean(1), CBoolean = reader.IsDBNull(2) ? null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? 
null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CTinyint = reader.IsDBNull(3) ? null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? null : reader.GetDouble(15), + CChar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNationalChar = reader.IsDBNull(18) ? 
null : reader.GetString(18), + CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), + CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), + CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), + CText = reader.IsDBNull(22) ? null : reader.GetString(22), + CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), + CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), + CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), + CEnum = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(28) ? null : reader.GetInt16(28), + CDate = reader.IsDBNull(29) ? null : reader.GetDateTime(29), + CDatetime = reader.IsDBNull(30) ? null : reader.GetDateTime(30), + CTimestamp = reader.IsDBNull(31) ? null : reader.GetDateTime(31) }; } } @@ -1290,4 +1244,256 @@ public async Task TruncateMysqlTypes() await command.ExecuteNonQueryAsync(); } } + + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + public readonly record struct InsertMysqlBinaryTypesArgs(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(InsertMysqlBinaryTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public readonly record struct InsertMysqlBinaryTypesBatchArgs(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); + public async Task InsertMysqlBinaryTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public readonly record struct GetMysqlBinaryTypesRow(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); + public async Task GetMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlBinaryTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; + } + } + } + + return null; + } + + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public readonly record struct GetMysqlBinaryTypesCntRow(long Cnt, byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); + public async Task GetMysqlBinaryTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlBinaryTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? 
null : reader.GetFieldValue(7) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlBinaryTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? 
null : reader.GetFieldValue(7) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlBinaryTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index 555922e7..2eb46a35 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -349,12 +349,19 @@ "type": { "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_binary_types" + }, + "columns": [ { "name": "c_bit", "length": 8, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "bit" @@ -364,7 +371,7 @@ "name": "c_binary", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "binary" @@ -374,7 +381,7 @@ "name": "c_varbinary", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "varbinary" @@ -384,7 +391,7 @@ "name": "c_tinyblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "tinyblob" @@ -394,7 +401,7 @@ "name": "c_blob", "length": -1, 
"table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "blob" @@ -404,7 +411,7 @@ "name": "c_mediumblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "mediumblob" @@ -414,7 +421,7 @@ "name": "c_longblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "longblob" @@ -1459,27 +1466,12 @@ "filename": "query.sql" }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -1494,7 +1486,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", 
"length": 1, @@ -1509,7 +1501,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -1524,7 +1516,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -1539,7 +1531,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -1554,7 +1546,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -1569,7 +1561,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -1584,7 +1576,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -1599,7 +1591,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_decimal", "length": 10, @@ -1614,7 +1606,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_dec", "length": 10, @@ -1629,7 +1621,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_numeric", "length": 10, @@ -1644,7 +1636,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_fixed", "length": 10, @@ -1659,7 +1651,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_float", "length": -1, @@ -1674,7 +1666,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -1689,7 +1681,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -1704,7 +1696,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -1719,7 +1711,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -1734,7 +1726,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -1749,7 +1741,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -1764,7 +1756,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -1779,7 +1771,7 @@ } }, { - "number": 22, + 
"number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -1794,7 +1786,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -1809,7 +1801,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -1824,7 +1816,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -1839,7 +1831,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1854,7 +1846,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1869,7 +1861,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -1884,7 +1876,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1899,7 +1891,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1914,7 +1906,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1929,7 +1921,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1942,125 +1934,23 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - 
"name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "mysql_types" } }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - 
}, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -2075,7 +1965,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -2090,7 +1980,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -2105,7 +1995,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -2120,7 +2010,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -2135,7 +2025,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -2150,7 +2040,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -2165,7 +2055,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -2180,7 +2070,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_float", "length": -1, @@ -2195,7 +2085,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_numeric", "length": 10, @@ -2210,7 +2100,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_decimal", "length": 10, @@ -2225,7 +2115,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_dec", "length": 10, @@ -2240,7 +2130,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_fixed", "length": 10, @@ -2255,7 +2145,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -2270,7 +2160,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -2285,7 +2175,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -2300,7 +2190,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -2315,7 +2205,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -2330,7 +2220,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ 
-2345,7 +2235,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -2360,7 +2250,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -2375,7 +2265,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -2390,7 +2280,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -2405,7 +2295,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -2420,7 +2310,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -2435,7 +2325,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -2450,7 +2340,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -2465,7 +2355,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -2480,7 +2370,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -2495,7 +2385,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -2510,7 +2400,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -2523,96 +2413,6 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - 
}, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], "filename": "query.sql", @@ -2621,7 +2421,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -2976,89 +2776,12 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - 
"name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n 
c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -3093,17 +2816,6 @@ }, "originalName": "c_boolean" }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, { "name": "c_tinyint", "length": 3, @@ -3422,78 +3134,12 @@ "name": "timestamp" }, "originalName": "c_timestamp" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS 
max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", + "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ @@ -3532,6 +3178,426 @@ "name": "TruncateMysqlTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", 
+ "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": 
"mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + 
"name": "bigint" + } + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorExample/request.message b/examples/MySqlConnectorExample/request.message index ff012496f949702dbeff93b4626b2045e91b7c57..5283adaeade539646c904b44b2ddba222ed07d40 100644 GIT binary patch literal 22735 zcmeHP&2t<_6=(J3Rm+O)ww*YW4^PIiElm_@rPz+W#CEjO$|%u?BdwDF6}I+cEKjo9 zm1kCtC6}u>LP0?lTyx?Ip(;fs7r1cYLQx#ykQ@97aDYpAufL{ec4l@hJBd?Bh7NA-__rIz8Ru?gbF1O_Q?*v}rn@o4Zcnv$w&0duyX93Bl^YDHH4bbvgsD3^EEZ#U*z&8Aapx4a^+ zI~}U|w<_M2@vr`SR^6#?Z>%}M@3*ad+uL>~tPQ8>c$Ky@=Wltf_vPJYWz+GuDm7;X z3Rd2Vm3_@=S`$|5j^lZ5-SMsb_wQM?R%3g!2^`m)_A+X5VY^vtyRD|5->lp%yUjMR 
z@G6zDHCFzf)ois_wi^vAf3wo?p-TL|g|zqYPjr=er{dM1i9O5p+n(Fp=t*?j1vRw6 z2vUS|HHtdVgCgyetL`!ZEaT@Zq52Ll||z( zBV^PVVMfmS#-}fmp<21xYBh41weX|ruyLU6Hg}+t=vgw%shmo)JE7pp^`}UjA*5FJ zH!F<>$j^Nt|2ir`jI5-h=g3H{yy?{4?ag$i5o1`9(J?YuL#s@A$)M4Xt(?GCLc!T^ zyzbPKBnvumH_}CCjUi40(ht+~)_3YsTR#=G_& zc&E_H92j`Hp{Qn$8C3JbWllz4#?kCM?cAX#{ZZpcm>#H%AN3O@r|LE<-i~nVXHZVn zZRfIUu>@6cdGHvV5_4k63GOB@WRn=7% z&nv74iRm<(n|fjcV_+$M7VTQn95r~kUA$@NBL`H9K z@U$9C3TMaw0E}bXO}uoFFnj&JdVCU^4#z=k<;X$YVt)-)LfHThuqEjAG7=thW$V@s9WJQi(6Y1Hay@I39A+${g^2R# z5O4vor$fcM3qYHal2>W^jS7^l&oeX&ZpUfXox1VY3^}Y_4xl6$QzJqcb}jhS4ibb` z(ygB6UC@neiey1JYgBGDM&cnO((?1F7TBcRi5MPgbTr-gX&sF4(Fy z5oe1UwXm99HekSZx(|q5B(ZbZ40K6P!;H($53A*u@kt+BthmbFLWQ7=&d~bLX*8%_&gLNix`QH{Eu~1)+foAZ)*w?V~0c zf?)TxKdhGMl%*NsB!b{*`*uBb#v_Mgo*#g>A^YA0k|bB zXfW^Gb!zqnsN+nh868n;8tHQbw}$3qUxaF(i|h6n&X8%@=d%4Y*I`~MY-nbh0t$iA z8h_7_mt%FI23A-P28NySz z^^l(!Reqwd1C4upe8nUf+XMJ4t<#O)^pQeWbr8*QE-k)UH0M@lmx}9iMH~cYPoEv9 zPim@J0q1+Y(r!Cm^KtST7ePtq$U(H=@)ob2HqBqmtOfR84UH^aU3Z8z57SJk7@Amg z{lLUR56(LcC+ZO%%AsE(M`PYg4~rKy8iAWe_oG+&=NXc_aeaOk>RjQ0KZ5QZCr7BO zs5)LPuA3{XC3EG*(h?oLv4yKdDRr2QW3Tn0FC1HB76a||?r4mU`pA{8HWv)R#T9dG z%3fbuoGa$tx^GW`&po8<&Bw9$SWQo_fT2y1eyD0_+6h~jLY}uW==b#w`)?n4@8Q&A z&aTXJn4{|sEuU-esfhMwn{^zt-*_$B=VA7oCWEY{Dasea8&uEmm9G5HGh|wy`Pj0= z97Bl{V*J2zB0BpKKKm754HKU8I1GqEafcKT;47c7%drd4kSmGnmkwrSO}oL8M{89< zu3?N{^^wmq!nlMtm_&Rd%X&NgN$?7QqH$ryhDF_EYdTCIHKS?E_^glo zDWarlT}#uGAvH~NJ&C%`=#(>I${C~#t3_~4bNz@bsh$~4Rs5ykvekRA62g5;De!Ld zwSdB{)1xrd=eM^Ypsu-df#rKpT6Yp%ai8>)-$fLsYa^_ThqJD*7JD}?Io~m(Mh)U5 zNRR0b5e;hvo=Dqz0A%%Ltp>Xougw8qjGK|>9d<8zpFu3{UESfyAOUudZ;o-xg77T* zKmI>FU<-QbHP}ViBg&=G5$+d@BJfafTcA|7KDrE(Ip$S!QWYQF0=m@VEFnr|1->cd=3=Q`g2kWwz zLMxc3!Oj<4&u?ozQrIa$&BSVE?_FX&NOxF!?+WV?bOTreTA%9Poi}D5pO0*&-59K92l*Fy2Qw`1?7(uuaI-^u#a7?Ki;dE+l2#6C^=$Lz@1s zmaDgyvx9UoF%zF4OwY*g>REjI+u0%dYD|hgK@d;X@A8zsJ(oQ|uO?*e(**>XetSNf zrLV^)^AltQs|2+L0J>Vk&BOib5*Rx0HYq3fEE&^oU z-@cxubSX8n-$hb%QvkH-C^s1~Apl@Z5CNttXwXze5xmJU?oH$jqOAmzB#FE(&M4?j 
zBqy})IJ7Qtmq0?jRMwWs3vom1p5#3i-4Bo;ONJ<+$B+YB_Y`lPobh^VoutOr$uiXC z1g}eUuR(HMkP@v2f9)S+pQhjIW&c4Jc?mlhz<(OxKjCd+k3xbNm8wVR?Uy+@Rub93 z#Ge5Z2Y1BxEhNZ@+rNNr9GE~&&vG?IH~J*Vf-XZ!tO5Q4|8txlfkY481^h^f{P+ug z?0N8G&~bQ$fCF~$B)l5zSQ24R%QE<}li6`yltxB3zI*PLD@H zZlThy-7<#rwbs_oO|P}Nk9Kiq>OR`_wY++Oc4qK4^hM)4Y&4$ zQu!K$34qlD{<82T$`-2$d5?V@fENtW;;U346t0CQGE@sixl~3*Ekuh?Fgy#ta=D3R z902k+`YjUPXVF;rg=H5{dFat`xsMa~XD#EWT|E5B_h(ILk)>5~KVxo+gNFuaVge&x!EFvKi*x)?qM$&nZLIyLe zaeNO8Vsv~q2IBDnlHxB|g%-l-{Awv7)&UyTgBG29iSZ5I9&~!??O~7*Dv7_=x!CAw zr{Wls#F5r~YQ!{wI~pnF_8CM;BW$tJQSOnX(@tq;LPQj59*SqCNJznjT+`7AXr%sV z4j7L`bAVr7b2*u$yccRNsq|^v zu%<>b!PHa~Gs2e=z0_cg@$cj0j|VtzVL9Kq)f+3NF?-ycT{mk%niwZMnctM4M&yXa zye6M2a3dZE07u+FkP{SRlF#BH56g)Q330@w#6ZEN6v+XQywCH|p(`yXE$-tml6>Ut2!|z>|FpMcKc+h$yfZ zU@O}2|J0un{~iwE-??U6Z0dQQ^us6)?T39m6U4`0(;7G)?DVv68X}{^Ne2)N>A@(%-+g$Pcyz`-Jd8h)<*S8< z92G|s+YxyADAs*n^l*H&N92U9&LR>s_DOO;ygoW>!_o z7@=FZ+`}7P_F%&2i&|Q_zz(Dc?V!8_J1EcNB8wkQ@vPcFDGmbAnBEk5L7 z8Ax$jMLIW&f*~bt6XSC2H(?v)H4I)Tl|Tq7T@1uB6fy4AP#5-Fu%~hv11M6P`lOXE zR4n~bqjED8mhB>Jt(;{T60Uj|2i}8;LSTtv-hnNZ>p>I&@AWQB%^FOk3Pmu%Z^3p; z)?{f9y^9qw111!7CI!qqVRAEx+47>+>bg}JKN%t)XF7Mj=roVppm-*JP;~6L*dHCZ zy{x)FI^^ikW|TtwX!zRzxgF%mU`&$9lRgc7)o#?0QJs#{(Low%>b%Xn%NQ5)KXfh7FHZ@%g7q$(dItbM~(X?x_=3=RFtBUVYo~@VrDg1{4kejX%vb<-}0@B(%qa zUZaPL21md0Ge<)ektdw){Zv@oOO8y!g7z? 
zy;a}6vRSOvcQ$s*tjv&g>=OmLVY_s-9%f+O>~zv89v8bS#kWgWTVF)*EZVzIV;qG- zPJ%bs-X2jFYSry}dGm4^Yw)1pfIsFi*9_zExJ!onbJ9r|g>|Eu)5weOJ565JMjx7dK5ngM%*D?EY&t@)vb+?QYq4Lw^rKP2`M5xHZt&0 z=+dF;1dlpsjS2{#JFOQyKFM}c6n}99{(5^Z;3k@jOmYMOgBV!2@p}65!aIVY+YSzQcEsYeoprx@kD|kqKe+Pgx4fuisX9N`|I~af` zqcVhZGW(+7B`4GSpNPpYv+UqV`J?0@dikU<_%YYV8iJ41L+}4)Q~@n-Cuw6OjUq4+ z8;0xoKDc98;7hI7{MCSX55Ct1J6U*h-00-Mbv6eb7;qD#&k}Gdukn_5d3Vs+HfUi`M5fE z`ohn4wAN!+Cp)+%#s-#x)uv8&Kpk!x8^m(`CUw5;X^>c!P_*r3R_bNfD4S$dGi^^J zOkr9C C2oSLV diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index fc7ef595..a82a811c 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -40,6 +40,9 @@ public class MysqlType public JsonElement? CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } + }; + public class MysqlBinaryType + { public byte? CBit { get; set; } public byte[] CBinary { get; set; } public byte[] CVarbinary { get; set; } diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 0aed6423..78efe44f 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -42,10 +42,9 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = "INSERT INTO mysql_types (c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) VALUES ( @c_bit, 
@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; public class InsertMysqlTypesArgs { - public byte? CBit { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } public short? CTinyint { get; set; } @@ -77,12 +76,6 @@ public class InsertMysqlTypesArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { @@ -93,7 +86,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) await connection.OpenAsync(); using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) { - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); @@ -125,12 +117,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -147,7 +133,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { command.CommandText = InsertMysqlTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bit", args.CBit ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); @@ -179,19 +164,12 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } public class InsertMysqlTypesBatchArgs { - public byte? CBit { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } public short? CTinyint { get; set; } @@ -223,12 +201,6 @@ public class InsertMysqlTypesBatchArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? 
CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -253,9 +225,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); @@ -283,13 +252,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bit", "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", 
"c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; set; } @@ -324,13 +293,6 @@ public class GetMysqlTypesRow public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task GetMysqlTypes() { @@ -378,14 +340,7 @@ public async Task GetMysqlTypes() CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? 
(MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() }; } } @@ -441,14 +396,7 @@ public async Task GetMysqlTypes() CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet(), - CBit = reader.IsDBNull(32) ? (byte? )null : reader.GetFieldValue(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CSet = reader.IsDBNull(31) ? 
null : reader.GetString(31).ToMysqlTypesCSetSet() }; } } @@ -457,13 +405,12 @@ public async Task GetMysqlTypes() return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float , c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types GROUP BY c_bool , c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; public class GetMysqlTypesCntRow { public long Cnt { get; set; } public bool? CBool { get; set; } public bool? CBoolean { get; set; } - public byte? 
CBit { get; set; } public short? CTinyint { get; set; } public short? CSmallint { get; set; } public int? CMediumint { get; set; } @@ -493,12 +440,6 @@ public class GetMysqlTypesCntRow public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } }; public async Task GetMysqlTypesCnt() { @@ -518,42 +459,35 @@ public async Task GetMysqlTypesCnt() Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? (short? )null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? (double? )null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? 
null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? (DateTime? )null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? (decimal? 
)null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), + CChar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), + CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), + CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), + CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), + CText = reader.IsDBNull(22) ? null : reader.GetString(22), + CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), + CJson = reader.IsDBNull(24) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(24)), + CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), + CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(28) ? (short? )null : reader.GetInt16(28), + CDate = reader.IsDBNull(29) ? (DateTime? )null : reader.GetDateTime(29), + CDatetime = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), + CTimestamp = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31) }; } } @@ -581,42 +515,35 @@ public async Task GetMysqlTypesCnt() Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), - CBit = reader.IsDBNull(3) ? (byte? )null : reader.GetFieldValue(3), - CTinyint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CSmallint = reader.IsDBNull(5) ? (short? 
)null : reader.GetInt16(5), - CMediumint = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInt = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CInteger = reader.IsDBNull(8) ? (int? )null : reader.GetInt32(8), - CBigint = reader.IsDBNull(9) ? (long? )null : reader.GetInt64(9), - CFloat = reader.IsDBNull(10) ? (double? )null : reader.GetDouble(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDecimal = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDec = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CFixed = reader.IsDBNull(14) ? (decimal? )null : reader.GetDecimal(14), - CDouble = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CDoublePrecision = reader.IsDBNull(16) ? (double? )null : reader.GetDouble(16), - CChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CNationalChar = reader.IsDBNull(19) ? null : reader.GetString(19), - CVarchar = reader.IsDBNull(20) ? null : reader.GetString(20), - CTinytext = reader.IsDBNull(21) ? null : reader.GetString(21), - CMediumtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CText = reader.IsDBNull(23) ? null : reader.GetString(23), - CLongtext = reader.IsDBNull(24) ? null : reader.GetString(24), - CJson = reader.IsDBNull(25) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(25)), - CJsonStringOverride = reader.IsDBNull(26) ? null : reader.GetString(26), - CEnum = reader.IsDBNull(27) ? (MysqlTypesCEnum? )null : reader.GetString(27).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(28) ? null : reader.GetString(28).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(29) ? (short? )null : reader.GetInt16(29), - CDate = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), - CDatetime = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31), - CTimestamp = reader.IsDBNull(32) ? (DateTime? 
)null : reader.GetDateTime(32), - CBinary = reader.IsDBNull(33) ? null : reader.GetFieldValue(33), - CVarbinary = reader.IsDBNull(34) ? null : reader.GetFieldValue(34), - CTinyblob = reader.IsDBNull(35) ? null : reader.GetFieldValue(35), - CBlob = reader.IsDBNull(36) ? null : reader.GetFieldValue(36), - CMediumblob = reader.IsDBNull(37) ? null : reader.GetFieldValue(37), - CLongblob = reader.IsDBNull(38) ? null : reader.GetFieldValue(38) + CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), + CChar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), + CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), + CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), + CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), + CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), + CText = reader.IsDBNull(22) ? null : reader.GetString(22), + CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), + CJson = reader.IsDBNull(24) ? (JsonElement? 
)null : JsonSerializer.Deserialize(reader.GetString(24)), + CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), + CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), + CYear = reader.IsDBNull(28) ? (short? )null : reader.GetInt16(28), + CDate = reader.IsDBNull(29) ? (DateTime? )null : reader.GetDateTime(29), + CDatetime = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), + CTimestamp = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31) }; } } @@ -715,6 +642,294 @@ public async Task TruncateMysqlTypes() } } + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + public class InsertMysqlBinaryTypesArgs + { + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(InsertMysqlBinaryTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public class InsertMysqlBinaryTypesBatchArgs + { + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task InsertMysqlBinaryTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public class 
GetMysqlBinaryTypesRow + { + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlBinaryTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } + } + } + + return null; + } + + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + public class GetMysqlBinaryTypesCntRow + { + public long Cnt { get; set; } + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } + }; + public async Task GetMysqlBinaryTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlBinaryTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? 
null : reader.GetFieldValue(7) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlBinaryTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? 
null : reader.GetFieldValue(7) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlBinaryTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -781,7 +996,7 @@ public async Task GetAuthor(GetAuthorArgs args) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public long Id { get; set; } @@ -1280,7 +1495,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index 8732070b..f6db1eaa 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -349,12 +349,19 @@ "type": { "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_binary_types" + }, + "columns": [ { "name": "c_bit", "length": 8, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "bit" @@ -364,7 +371,7 @@ "name": "c_binary", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "binary" @@ -374,7 +381,7 @@ "name": "c_varbinary", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "varbinary" @@ -384,7 +391,7 @@ "name": "c_tinyblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "tinyblob" @@ -394,7 +401,7 @@ "name": "c_blob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "blob" @@ -404,7 +411,7 @@ "name": "c_mediumblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "mediumblob" @@ -414,7 +421,7 @@ "name": "c_longblob", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_binary_types" }, "type": { "name": "longblob" @@ -608,27 +615,12 @@ }, "queries": [ { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, 
c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -643,7 +635,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -658,7 +650,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -673,7 +665,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -688,7 +680,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -703,7 +695,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -718,7 +710,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, @@ -733,7 +725,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -748,7 
+740,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_decimal", "length": 10, @@ -763,7 +755,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_dec", "length": 10, @@ -778,7 +770,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_numeric", "length": 10, @@ -793,7 +785,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_fixed", "length": 10, @@ -808,7 +800,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_float", "length": -1, @@ -823,7 +815,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -838,7 +830,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -853,7 +845,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -868,7 +860,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -883,7 +875,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -898,7 +890,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -913,7 +905,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -928,7 +920,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -943,7 +935,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -958,7 +950,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -973,7 +965,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -988,7 +980,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1003,7 +995,7 @@ } }, { - "number": 27, + "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1018,7 +1010,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", 
"length": 15, @@ -1033,7 +1025,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1048,7 +1040,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1063,7 +1055,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1078,7 +1070,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1091,125 +1083,23 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], + "comments": [ + " Basic types " + ], "filename": "query.sql", "insert_into_table": { "name": "mysql_types" } }, { - "text": "INSERT INTO mysql_types \n(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, 
c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp,\n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, - "column": { - "name": "c_bit", - "length": 8, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 2, "column": { "name": "c_bool", "length": 1, @@ -1224,7 +1114,7 @@ } }, { - "number": 3, + "number": 2, "column": { "name": "c_boolean", "length": 1, @@ -1239,7 +1129,7 @@ } }, { - "number": 4, + "number": 3, "column": { "name": "c_tinyint", "length": 3, @@ -1254,7 +1144,7 @@ } }, { - "number": 5, + "number": 4, "column": { "name": "c_smallint", "length": -1, @@ -1269,7 +1159,7 @@ } }, { - "number": 6, + "number": 5, "column": { "name": "c_mediumint", "length": -1, @@ -1284,7 +1174,7 @@ } }, { - "number": 7, + "number": 6, "column": { "name": "c_int", "length": -1, @@ -1299,7 +1189,7 @@ } }, { - "number": 8, + "number": 7, "column": { "name": "c_integer", "length": -1, 
@@ -1314,7 +1204,7 @@ } }, { - "number": 9, + "number": 8, "column": { "name": "c_bigint", "length": -1, @@ -1329,7 +1219,7 @@ } }, { - "number": 10, + "number": 9, "column": { "name": "c_float", "length": -1, @@ -1344,7 +1234,7 @@ } }, { - "number": 11, + "number": 10, "column": { "name": "c_numeric", "length": 10, @@ -1359,7 +1249,7 @@ } }, { - "number": 12, + "number": 11, "column": { "name": "c_decimal", "length": 10, @@ -1374,7 +1264,7 @@ } }, { - "number": 13, + "number": 12, "column": { "name": "c_dec", "length": 10, @@ -1389,7 +1279,7 @@ } }, { - "number": 14, + "number": 13, "column": { "name": "c_fixed", "length": 10, @@ -1404,7 +1294,7 @@ } }, { - "number": 15, + "number": 14, "column": { "name": "c_double", "length": -1, @@ -1419,7 +1309,7 @@ } }, { - "number": 16, + "number": 15, "column": { "name": "c_double_precision", "length": -1, @@ -1434,7 +1324,7 @@ } }, { - "number": 17, + "number": 16, "column": { "name": "c_char", "length": -1, @@ -1449,7 +1339,7 @@ } }, { - "number": 18, + "number": 17, "column": { "name": "c_nchar", "length": -1, @@ -1464,7 +1354,7 @@ } }, { - "number": 19, + "number": 18, "column": { "name": "c_national_char", "length": -1, @@ -1479,7 +1369,7 @@ } }, { - "number": 20, + "number": 19, "column": { "name": "c_varchar", "length": 100, @@ -1494,7 +1384,7 @@ } }, { - "number": 21, + "number": 20, "column": { "name": "c_tinytext", "length": -1, @@ -1509,7 +1399,7 @@ } }, { - "number": 22, + "number": 21, "column": { "name": "c_mediumtext", "length": -1, @@ -1524,7 +1414,7 @@ } }, { - "number": 23, + "number": 22, "column": { "name": "c_text", "length": -1, @@ -1539,7 +1429,7 @@ } }, { - "number": 24, + "number": 23, "column": { "name": "c_longtext", "length": -1, @@ -1554,7 +1444,7 @@ } }, { - "number": 25, + "number": 24, "column": { "name": "c_json", "length": -1, @@ -1569,7 +1459,7 @@ } }, { - "number": 26, + "number": 25, "column": { "name": "c_json_string_override", "length": -1, @@ -1584,7 +1474,7 @@ } }, { - "number": 27, 
+ "number": 26, "column": { "name": "c_enum", "length": 6, @@ -1599,7 +1489,7 @@ } }, { - "number": 28, + "number": 27, "column": { "name": "c_set", "length": 15, @@ -1614,7 +1504,7 @@ } }, { - "number": 29, + "number": 28, "column": { "name": "c_year", "length": -1, @@ -1629,7 +1519,7 @@ } }, { - "number": 30, + "number": 29, "column": { "name": "c_date", "length": -1, @@ -1644,7 +1534,7 @@ } }, { - "number": 31, + "number": 30, "column": { "name": "c_datetime", "length": 19, @@ -1659,7 +1549,7 @@ } }, { - "number": 32, + "number": 31, "column": { "name": "c_timestamp", "length": 19, @@ -1672,96 +1562,6 @@ }, "originalName": "c_timestamp" } - }, - { - "number": 33, - "column": { - "name": "c_binary", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - } - }, - { - "number": 34, - "column": { - "name": "c_varbinary", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - } - }, - { - "number": 35, - "column": { - "name": "c_tinyblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - } - }, - { - "number": 36, - "column": { - "name": "c_blob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - } - }, - { - "number": 37, - "column": { - "name": "c_mediumblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - } - }, - { - "number": 38, - "column": { - "name": "c_longblob", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" - } } ], "filename": "query.sql", @@ -1770,7 +1570,7 @@ } }, { - 
"text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -2125,89 +1925,12 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" - }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": 
"mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nFROM mysql_types\nGROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, \n c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, \n c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -2242,17 +1965,6 @@ }, 
"originalName": "c_boolean" }, - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, { "name": "c_tinyint", "length": 3, @@ -2571,78 +2283,12 @@ "name": "timestamp" }, "originalName": "c_timestamp" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "varbinary" - }, - "originalName": "c_varbinary" - }, - { - "name": "c_tinyblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyblob" - }, - "originalName": "c_tinyblob" - }, - { - "name": "c_blob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "blob" - }, - "originalName": "c_blob" - }, - { - "name": "c_mediumblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumblob" - }, - "originalName": "c_mediumblob" - }, - { - "name": "c_longblob", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "longblob" - }, - "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", + "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ @@ -2682,6 +2328,426 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": 
"public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" 
+ }, + "originalName": "c_bit" + } + }, + { + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } + }, + { + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, + { + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } + }, + { + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, + { + "number": 7, + "column": { + "name": "c_longblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } + }, + { + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + 
"length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + }, + { + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + }, + { + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, + { + "name": "c_tinyblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": 
"mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + }, + { + "name": "c_mediumblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + }, + { + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", + "cmd": ":exec", + "filename": "query.sql" + }, { "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", "name": "GetAuthor", diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index 61bb166b..d7d2f630 100644 --- a/examples/MySqlConnectorLegacyExample/request.message +++ b/examples/MySqlConnectorLegacyExample/request.message @@ -2,7 +2,7 @@ ° 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╔ $examples/MySqlConnectorLegacyExamplecsharpВ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunnerыpublic"БpublicО +./dist/LocalRunner╡public"╦public═  mysql_types$ c_bool0R  mysql_typesb tinyint' c_boolean0R  mysql_typesb tinyint' @@ -49,19 +49,20 @@ c_longtext0 c_json0         R  mysql_typesbjson: c_json_string_override0         R  mysql_typesbjson/ c_enum0R  
mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_set -c_bit0R  mysql_typesbbit% -c_binary0R  mysql_typesbbinary+ +c_set0R  mysql_typesbmysql_types_c_setИ +mysql_binary_types& +c_bit0Rmysql_binary_typesbbit, +c_binary0Rmysql_binary_typesbbinary2 c_varbinary0 -R  mysql_typesb  varbinary2 +Rmysql_binary_typesb  varbinary9 -c_tinyblob0         R  mysql_typesb -tinyblob* -c_blob0         R  mysql_typesbblob6 - c_mediumblob0         R  mysql_typesb  -mediumblob2 +c_tinyblob0         Rmysql_binary_typesb +tinyblob1 +c_blob0         Rmysql_binary_typesbblob= + c_mediumblob0         Rmysql_binary_typesb  +mediumblob9 -c_longblob0         R  mysql_typesb +c_longblob0         Rmysql_binary_typesb longblobА authors& id0         R authorsbbigint& @@ -81,150 +82,165 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_type┤ -╗INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*2. 
-c_bit0Rpublic mysql_typesbbitzc_bit*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF + author_type0Rextendedbiosbbios_author_typeу +╝ +INSERT INTO mysql_types +( + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 +c_bool0Rpublic mysql_typesb tinyintzc_bool*>: + c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: + c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF c_smallint0         Rpublic mysql_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D @ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> -: +c_smallint*MI + c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 +c_int0         Rpublic mysql_typesbintzc_int*C? 
+ c_integer0         Rpublic mysql_typesbintz c_integer*D@ +c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> : c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublic mysql_typesb decimalz c_decimal*6 +2 c_dec0 -Rpublic mysql_typesb decimalzc_dec*> : +Rpublic mysql_typesb decimalzc_dec*> : c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*: 6 +Rpublic mysql_typesb decimalz c_numeric*: 6 c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*A= -c_float0         Rpublic mysql_typesbfloatzc_float*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF +Rpublic mysql_typesb decimalzc_fixed*A = +c_float0         Rpublic mysql_typesbfloatzc_float*D@ +c_double0         Rpublic mysql_typesbdoublezc_double*XT +c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: +c_char0         Rpublic mysql_typesbcharzc_char*@< +c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL +c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: + c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF c_tinytext0         Rpublic mysql_typesb tinytextz -c_tinytext*PL +c_tinytext*PL c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +mediumtextz c_mediumtext*>: +c_text0         Rpublic mysql_typesbtextzc_text*JF c_longtext0         Rpublic mysql_typesb longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
-c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= +c_longtext*>: +c_json0         Rpublic mysql_typesbjsonzc_json*^Z +c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? +c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: +c_year0         Rpublic mysql_typesbyearzc_year*>: +c_date0         Rpublic mysql_typesbdatezc_date*A= c_datetime0Rpublic mysql_typesb datetimez -c_datetime*D @ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp*;!7 -c_binary0Rpublic mysql_typesbbinaryzc_binary*D"@ - c_varbinary0 -Rpublic mysql_typesb  varbinaryz c_varbinary*J#F - -c_tinyblob0         Rpublic mysql_typesb -tinyblobz -c_tinyblob*>$: -c_blob0         Rpublic mysql_typesbblobzc_blob*P%L - c_mediumblob0         Rpublic mysql_typesb  -mediumblobz c_mediumblob*J&F - -c_longblob0         Rpublic mysql_typesb -longblobz -c_longblob: query.sqlB  mysql_types╝ -║INSERT INTO mysql_types -(c_bit, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*2. 
-c_bit0Rpublic mysql_typesbbitzc_bit*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF +c_datetime*D@ + c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp2 Basic types : query.sqlB  mysql_types▄ +╗INSERT INTO mysql_types +( + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 +c_bool0Rpublic mysql_typesb tinyintzc_bool*>: + c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: + c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF c_smallint0         Rpublic mysql_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D @ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A -= -c_float0         Rpublic mysql_typesbfloatzc_float*> : +c_smallint*MI + c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 +c_int0         Rpublic mysql_typesbintzc_int*C? 
+ c_integer0         Rpublic mysql_typesbintz c_integer*D@ +c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A = +c_float0         Rpublic mysql_typesbfloatzc_float*> +: c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*> : +Rpublic mysql_typesb decimalz c_numeric*> : c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublic mysql_typesb decimalz c_decimal*6 2 c_dec0 -Rpublic mysql_typesb decimalzc_dec*:6 +Rpublic mysql_typesb decimalzc_dec*: 6 c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF +Rpublic mysql_typesb decimalzc_fixed*D@ +c_double0         Rpublic mysql_typesbdoublezc_double*XT +c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: +c_char0         Rpublic mysql_typesbcharzc_char*@< +c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL +c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: + c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF c_tinytext0         Rpublic mysql_typesb tinytextz -c_tinytext*PL +c_tinytext*PL c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +mediumtextz c_mediumtext*>: +c_text0         Rpublic mysql_typesbtextzc_text*JF c_longtext0         Rpublic mysql_typesb longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
-c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= +c_longtext*>: +c_json0         Rpublic mysql_typesbjsonzc_json*^Z +c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? +c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: +c_year0         Rpublic mysql_typesbyearzc_year*>: +c_date0         Rpublic mysql_typesbdatezc_date*A= c_datetime0Rpublic mysql_typesb datetimez -c_datetime*D @ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp*;!7 -c_binary0Rpublic mysql_typesbbinaryzc_binary*D"@ - c_varbinary0 -Rpublic mysql_typesb  varbinaryz c_varbinary*J#F - -c_tinyblob0         Rpublic mysql_typesb -tinyblobz -c_tinyblob*>$: -c_blob0         Rpublic mysql_typesbblobzc_blob*P%L - c_mediumblob0         Rpublic mysql_typesb  -mediumblobz c_mediumblob*J&F - -c_longblob0         Rpublic mysql_typesb -longblobz -c_longblob: query.sqlB  mysql_typesО -┼SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_types LIMIT 1 GetMysqlTypes:one", +c_datetime*D@ + c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp: query.sqlB  mysql_types╡ +∙SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, 
c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", c_bool0R  mysql_typesb tinyintzc_bool"2 c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> @@ -274,38 +290,71 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set"& -c_bit0R  mysql_typesbbitzc_bit"/ -c_binary0R  mysql_typesbbinaryzc_binary"8 - c_varbinary0 -R  mysql_typesb  varbinaryz c_varbinary"> - -c_tinyblob0         R  mysql_typesb -tinyblobz -c_tinyblob"2 -c_blob0         R  mysql_typesbblobzc_blob"D - c_mediumblob0         R  mysql_typesb  -mediumblobz c_mediumblob"> - -c_longblob0         R  mysql_typesb -longblobz -c_longblob: query.sql■ -╜SELECT COUNT(1) AS cnt, c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlї +┴SELECT + COUNT(*) AS cnt, + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp FROM mysql_types -GROUP BY c_bool, c_boolean, c_bit, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - 
c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, - c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob +GROUP BY + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set, + c_year, + c_date, + c_datetime, + c_timestamp LIMIT 1GetMysqlTypesCnt:one" cnt0         @bbigint", c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"& -c_bit0R  mysql_typesbbitzc_bit"2 + c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> c_smallint0         R  mysql_typesb @@ -351,27 +400,121 @@ c_longtext"2 c_datetime0R  mysql_typesb datetimez c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp"/ -c_binary0R  mysql_typesbbinaryzc_binary"8 + c_timestamp0R  mysql_typesb  timestampz c_timestamp: query.sqlО +{SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_typesGetMysqlFunctions:one" +max_int0         @bany"# + max_varchar0         @bany"% + max_timestamp0         @bany: query.sqlB +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlУ +о +INSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?)InsertMysqlBinaryTypes:exec*95 +c_bit0Rpublicmysql_binary_typesbbitzc_bit*B> +c_binary0Rpublicmysql_binary_typesbbinaryzc_binary*KG c_varbinary0 -R  mysql_typesb  varbinaryz c_varbinary"> +Rpublicmysql_binary_typesb  varbinaryz c_varbinary*QM -c_tinyblob0         R  mysql_typesb +c_tinyblob0         
Rpublicmysql_binary_typesb tinyblobz -c_tinyblob"2 -c_blob0         R  mysql_typesbblobzc_blob"D - c_mediumblob0         R  mysql_typesb  -mediumblobz c_mediumblob"> +c_tinyblob*EA +c_blob0         Rpublicmysql_binary_typesbblobzc_blob*WS + c_mediumblob0         Rpublicmysql_binary_typesb  +mediumblobz c_mediumblob*QM -c_longblob0         R  mysql_typesb +c_longblob0         Rpublicmysql_binary_typesb longblobz -c_longblob: query.sqlВ -oSELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_typesGetMysqlFunctions:one" -max_int0         @bany"# - max_varchar0         @bany"% - max_timestamp0         @bany: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlМ +c_longblob2 Binary types : query.sqlBmysql_binary_typesЛ +нINSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?)InsertMysqlBinaryTypesBatch :copyfrom*95 +c_bit0Rpublicmysql_binary_typesbbitzc_bit*B> +c_binary0Rpublicmysql_binary_typesbbinaryzc_binary*KG + c_varbinary0 +Rpublicmysql_binary_typesb  varbinaryz c_varbinary*QM + +c_tinyblob0         Rpublicmysql_binary_typesb +tinyblobz +c_tinyblob*EA +c_blob0         Rpublicmysql_binary_typesbblobzc_blob*WS + c_mediumblob0         Rpublicmysql_binary_typesb  +mediumblobz c_mediumblob*QM + +c_longblob0         Rpublicmysql_binary_typesb +longblobz +c_longblob: query.sqlBmysql_binary_types╫ +qSELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1GetMysqlBinaryTypes:one"- +c_bit0Rmysql_binary_typesbbitzc_bit"6 +c_binary0Rmysql_binary_typesbbinaryzc_binary"? 
+ c_varbinary0 +Rmysql_binary_typesb  varbinaryz c_varbinary"E + +c_tinyblob0         Rmysql_binary_typesb +tinyblobz +c_tinyblob"9 +c_blob0         Rmysql_binary_typesbblobzc_blob"K + c_mediumblob0         Rmysql_binary_typesb  +mediumblobz c_mediumblob"E + +c_longblob0         Rmysql_binary_typesb +longblobz +c_longblob: query.sqlЬ +ТSELECT + COUNT(*) AS cnt, + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +FROM mysql_binary_types +GROUP BY + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +LIMIT 1GetMysqlBinaryTypesCnt:one" +cnt0         @bbigint"- +c_bit0Rmysql_binary_typesbbitzc_bit"6 +c_binary0Rmysql_binary_typesbbinaryzc_binary"? + c_varbinary0 +Rmysql_binary_typesb  varbinaryz c_varbinary"E + +c_tinyblob0         Rmysql_binary_typesb +tinyblobz +c_tinyblob"9 +c_blob0         Rmysql_binary_typesbblobzc_blob"K + c_mediumblob0         Rmysql_binary_typesb  +mediumblobz c_mediumblob"E + +c_longblob0         Rmysql_binary_typesb +longblobz +c_longblob: query.sqlO +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlМ 8SELECT id, name, bio FROM authors WHERE name = ? 
LIMIT 1 GetAuthor:one"* id0         R authorsbbigintzid", name0         R authorsbtextzname"( diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 7637176d..4b2aca39 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -187,7 +187,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? 
CBoolean { get; init; } @@ -235,7 +235,7 @@ public class GetPostgresTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; init; } @@ -282,7 +282,7 @@ public class GetPostgresTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; public class GetPostgresFunctionsRow { public int? MaxInteger { get; init; } @@ -366,7 +366,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; public class GetPostgresUnstructuredTypesRow { public JsonElement? 
CJson { get; init; } @@ -510,7 +510,7 @@ public async Task InsertPostgresArrayTypesBatch(List(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public required long Id { get; init; } @@ -1038,7 +1038,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public required Author? Author { get; init; } @@ -1079,7 +1079,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public class GetDuplicateAuthorsRow { public required Author? 
Author { get; init; } @@ -1120,7 +1120,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public class GetAuthorsByBookNameRow { public required long Id { get; init; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index b67cc62b..ffcec1d6 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -188,7 +188,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? 
CBoolean { get; set; } @@ -236,7 +236,7 @@ public async Task GetPostgresTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; set; } @@ -283,7 +283,7 @@ public async Task GetPostgresTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -367,7 +367,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; public class GetPostgresUnstructuredTypesRow { public JsonElement? 
CJson { get; set; } @@ -511,7 +511,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetAuthor(GetAuthorArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public long Id { get; set; } @@ -1039,7 +1039,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -1080,7 +1080,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -1121,7 +1121,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public class GetAuthorsByBookNameRow { public long Id { get; set; } diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index d02dbba7..fc019632 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -164,7 +164,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? 
CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { @@ -262,7 +262,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? 
CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); public async Task GetPostgresTypesCnt() { @@ -358,7 +358,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() { @@ -483,7 +483,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task GetPostgresUnstructuredTypes() { @@ -710,7 +710,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() { @@ -1011,7 +1011,7 @@ public async Task TruncatePostgresGeoTypes() return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public readonly record struct ListAuthorsRow(long Id, string Name, string? Bio); public readonly record struct ListAuthorsArgs(int Offset, int Limit); public async Task> ListAuthors(ListAuthorsArgs args) @@ -1454,7 +1454,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); public async Task> ListAllAuthorsBooks() { @@ -1491,7 +1491,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . 
name WHERE authors1 . id < authors2 . id "; public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? Author2); public async Task> GetDuplicateAuthors() { @@ -1528,7 +1528,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? Book); public readonly record struct GetAuthorsByBookNameArgs(string Name); public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 8faba2b4..fcb9e18b 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -217,7 +217,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 :: TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, 
c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -342,7 +342,7 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT (* ) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; set; } @@ -464,7 +464,7 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_varchar ) AS max_varchar, MAX (c_timestamp ) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -602,7 +602,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; public class GetPostgresUnstructuredTypesRow { public JsonElement? CJson { get; set; } @@ -858,7 +858,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetAuthor(GetAuthorArgs args) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; public class ListAuthorsRow { public long Id { get; set; } @@ -1713,7 +1713,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -1754,7 +1754,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -1795,7 +1795,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; public class GetAuthorsByBookNameRow { public long Id { get; set; } diff --git a/examples/SqliteDapperExample/QuerySql.cs b/examples/SqliteDapperExample/QuerySql.cs index 62b430ee..0ccac6e2 100644 --- a/examples/SqliteDapperExample/QuerySql.cs +++ b/examples/SqliteDapperExample/QuerySql.cs @@ -408,7 +408,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public required Author? Author { get; init; } @@ -450,7 +450,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public class GetDuplicateAuthorsRow { public required Author? Author { get; init; } @@ -492,7 +492,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . 
author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public class GetAuthorsByBookNameRow { public required int Id { get; init; } @@ -563,7 +563,7 @@ public async Task DeleteAllAuthors() await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer , @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; public class InsertSqliteTypesArgs { public int? CInteger { get; init; } @@ -650,7 +650,7 @@ public class GetSqliteTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } - private const string GetSqliteTypesCntSql = "SELECT c_integer , c_real, c_text, c_blob, COUNT (* ) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; public class GetSqliteTypesCntRow { public int? 
CInteger { get; init; } @@ -678,7 +678,7 @@ public class GetSqliteTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } - private const string GetSqliteFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_real ) AS max_real, MAX (c_text ) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; public class GetSqliteFunctionsRow { public int? MaxInteger { get; init; } diff --git a/examples/SqliteDapperLegacyExample/QuerySql.cs b/examples/SqliteDapperLegacyExample/QuerySql.cs index b28123ce..0ac7dcc2 100644 --- a/examples/SqliteDapperLegacyExample/QuerySql.cs +++ b/examples/SqliteDapperLegacyExample/QuerySql.cs @@ -409,7 +409,7 @@ public async Task CreateBook(CreateBookArgs args) return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -451,7 +451,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -493,7 +493,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public class GetAuthorsByBookNameRow { public int Id { get; set; } @@ -564,7 +564,7 @@ public async Task DeleteAllAuthors() await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer , @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; public class InsertSqliteTypesArgs { public int? 
CInteger { get; set; } @@ -651,7 +651,7 @@ public async Task GetSqliteTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } - private const string GetSqliteTypesCntSql = "SELECT c_integer , c_real, c_text, c_blob, COUNT (* ) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; public class GetSqliteTypesCntRow { public int? CInteger { get; set; } @@ -679,7 +679,7 @@ public async Task GetSqliteTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } - private const string GetSqliteFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_real ) AS max_real, MAX (c_text ) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; public class GetSqliteFunctionsRow { public int? MaxInteger { get; set; } diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index f4506830..ec59608d 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -562,7 +562,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); public async Task> ListAllAuthorsBooks() { @@ -600,7 +600,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? Author2); public async Task> GetDuplicateAuthors() { @@ -638,7 +638,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public readonly record struct GetAuthorsByBookNameRow(int Id, string Name, string? Bio, Book? 
Book); public readonly record struct GetAuthorsByBookNameArgs(string Name); public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) @@ -709,7 +709,7 @@ public async Task DeleteAllAuthors() } } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer , @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) { @@ -827,7 +827,7 @@ public async Task InsertSqliteTypesBatch(List args) return null; } - private const string GetSqliteTypesCntSql = "SELECT c_integer , c_real, c_text, c_blob, COUNT (* ) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; public readonly record struct GetSqliteTypesCntRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob, int Cnt); public async Task GetSqliteTypesCnt() { @@ -886,7 +886,7 @@ public async Task InsertSqliteTypesBatch(List args) return null; } - private const string GetSqliteFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_real ) AS max_real, MAX (c_text ) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? 
MaxText); public async Task GetSqliteFunctions() { diff --git a/examples/SqliteLegacyExample/QuerySql.cs b/examples/SqliteLegacyExample/QuerySql.cs index 999fc912..688f29d7 100644 --- a/examples/SqliteLegacyExample/QuerySql.cs +++ b/examples/SqliteLegacyExample/QuerySql.cs @@ -647,7 +647,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -689,7 +689,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1 . id , authors1 . name, authors1 . bio, authors2 . id, authors2 . name, authors2 . bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -731,7 +731,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors . id , authors . name, authors . bio, books . id, books . name, books . author_id, books . description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; public class GetAuthorsByBookNameRow { public int Id { get; set; } @@ -811,7 +811,7 @@ public async Task DeleteAllAuthors() } } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer , @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; public class InsertSqliteTypesArgs { public int? CInteger { get; set; } @@ -946,7 +946,7 @@ public async Task GetSqliteTypes() return null; } - private const string GetSqliteTypesCntSql = "SELECT c_integer , c_real, c_text, c_blob, COUNT (* ) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; public class GetSqliteTypesCntRow { public int? CInteger { get; set; } @@ -1012,7 +1012,7 @@ public async Task GetSqliteTypesCnt() return null; } - private const string GetSqliteFunctionsSql = "SELECT MAX ( c_integer ) AS max_integer , MAX (c_real ) AS max_real, MAX (c_text ) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; public class GetSqliteFunctionsRow { public int? 
MaxInteger { get; set; } diff --git a/examples/config/mysql/types/query.sql b/examples/config/mysql/types/query.sql index 7b189a22..269c60ab 100644 --- a/examples/config/mysql/types/query.sql +++ b/examples/config/mysql/types/query.sql @@ -1,7 +1,8 @@ +/* Basic types */ + -- name: InsertMysqlTypes :exec INSERT INTO mysql_types ( - c_bit, c_bool, c_boolean, c_tinyint, @@ -26,15 +27,13 @@ INSERT INTO mysql_types c_year, c_date, c_datetime, - c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob + c_timestamp ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); -- name: InsertMysqlTypesBatch :copyfrom INSERT INTO mysql_types ( - c_bit, c_bool, c_boolean, c_tinyint, @@ -59,10 +58,9 @@ INSERT INTO mysql_types c_year, c_date, c_datetime, - c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob + c_timestamp ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); -- name: GetMysqlTypes :one SELECT * FROM mysql_types LIMIT 1; @@ -72,7 +70,6 @@ SELECT COUNT(*) AS cnt, c_bool, c_boolean, - c_bit, c_tinyint, c_smallint, c_mediumint, @@ -101,18 +98,11 @@ SELECT c_year, c_date, c_datetime, - c_timestamp, - c_binary, - c_varbinary, - c_tinyblob, - c_blob, - c_mediumblob, - c_longblob + c_timestamp FROM mysql_types GROUP BY c_bool, c_boolean, - c_bit, c_tinyint, c_smallint, c_mediumint, @@ -135,8 +125,7 @@ GROUP BY c_year, c_date, c_datetime, - c_timestamp, - c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob + c_timestamp LIMIT 1; -- name: GetMysqlFunctions :one @@ -148,3 +137,58 @@ FROM mysql_types; -- name: TruncateMysqlTypes :exec TRUNCATE TABLE mysql_types; + 
+/* Binary types */ + +-- name: InsertMysqlBinaryTypes :exec +INSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?); + +-- name: InsertMysqlBinaryTypesBatch :copyfrom +INSERT INTO mysql_binary_types +( + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +) +VALUES (?, ?, ?, ?, ?, ?, ?); + +-- name: GetMysqlBinaryTypes :one +SELECT * FROM mysql_binary_types LIMIT 1; + +-- name: GetMysqlBinaryTypesCnt :one +SELECT + COUNT(*) AS cnt, + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +FROM mysql_binary_types +GROUP BY + c_bit, + c_binary, + c_varbinary, + c_tinyblob, + c_blob, + c_mediumblob, + c_longblob +LIMIT 1; + +-- name: TruncateMysqlBinaryTypes :exec +TRUNCATE TABLE mysql_binary_types; diff --git a/examples/config/mysql/types/schema.sql b/examples/config/mysql/types/schema.sql index 9a120c8f..6d21f4ee 100644 --- a/examples/config/mysql/types/schema.sql +++ b/examples/config/mysql/types/schema.sql @@ -1,17 +1,13 @@ CREATE TABLE mysql_types ( - /* Boolean data types - TINYINT(1) synonyms */ - c_bool BOOL, - c_boolean BOOLEAN, - - /* Integer data types */ - c_tinyint TINYINT(3), - c_smallint SMALLINT, - c_mediumint MEDIUMINT, - c_int INT, - c_integer INTEGER, - c_bigint BIGINT, - - /* Float data types */ + /* Numeric data types */ + c_bool BOOL, + c_boolean BOOLEAN, + c_tinyint TINYINT(3), + c_smallint SMALLINT, + c_mediumint MEDIUMINT, + c_int INT, + c_integer INTEGER, + c_bigint BIGINT, c_float FLOAT, c_decimal DECIMAL(10,7), c_dec DEC(10,7), @@ -41,9 +37,10 @@ CREATE TABLE mysql_types ( /* Pre-defined types */ c_enum ENUM ('small', 'medium', 'big'), - c_set SET ('tea', 'coffee', 'milk'), + c_set SET ('tea', 'coffee', 'milk') +); - /* Binary data types */ +CREATE TABLE mysql_binary_types ( c_bit BIT(8), c_binary BINARY(3), c_varbinary VARBINARY(10), From 
ec8ce4c15f9ff30f45e9ac45b15f8be3e08a8250 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 15:53:14 +0200 Subject: [PATCH 08/33] fix: whitespace regex + more concise code --- CodeGenerator/Generators/QueriesGen.cs | 2 +- Drivers/Generators/ExecDeclareGen.cs | 10 +- Drivers/Generators/ExecLastIdDeclareGen.cs | 9 +- Drivers/Generators/ExecRowsDeclareGen.cs | 10 +- .../EndToEndScaffold/Templates/MySqlTests.cs | 24 +- .../MySqlConnectorDapperTester.cs | 3 +- .../MySqlConnectorDapperTester.generated.cs | 21 +- end2end/EndToEndTests/MySqlConnectorTester.cs | 3 +- .../MySqlConnectorTester.generated.cs | 21 +- .../MySqlConnectorDapperTester.cs | 3 +- .../MySqlConnectorDapperTester.generated.cs | 21 +- .../MySqlConnectorTester.cs | 3 +- .../MySqlConnectorTester.generated.cs | 21 +- .../MySqlConnectorDapperExample/Models.cs | 13 +- .../MySqlConnectorDapperExample/QuerySql.cs | 284 +++++---- .../MySqlConnectorDapperExample/request.json | 562 ++++++++++-------- .../request.message | Bin 22751 -> 23605 bytes .../Models.cs | 13 +- .../QuerySql.cs | 284 +++++---- .../request.json | 562 ++++++++++-------- .../request.message | 178 +++--- examples/MySqlConnectorExample/Models.cs | 3 +- examples/MySqlConnectorExample/QuerySql.cs | 419 +++++++++---- examples/MySqlConnectorExample/request.json | 562 ++++++++++-------- .../MySqlConnectorExample/request.message | Bin 22735 -> 23589 bytes .../MySqlConnectorLegacyExample/Models.cs | 13 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 460 +++++++++----- .../MySqlConnectorLegacyExample/request.json | 562 ++++++++++-------- .../request.message | 178 +++--- examples/NpgsqlDapperExample/QuerySql.cs | 121 +--- .../NpgsqlDapperLegacyExample/QuerySql.cs | 121 +--- examples/NpgsqlExample/QuerySql.cs | 99 +-- examples/NpgsqlLegacyExample/QuerySql.cs | 99 +-- examples/SqliteDapperExample/QuerySql.cs | 75 +-- .../SqliteDapperLegacyExample/QuerySql.cs | 75 +-- examples/SqliteExample/QuerySql.cs | 58 +- 
examples/SqliteLegacyExample/QuerySql.cs | 58 +- examples/config/mysql/types/query.sql | 82 ++- examples/config/mysql/types/schema.sql | 15 +- 39 files changed, 2739 insertions(+), 2308 deletions(-) diff --git a/CodeGenerator/Generators/QueriesGen.cs b/CodeGenerator/Generators/QueriesGen.cs index 88b13cd0..35988432 100644 --- a/CodeGenerator/Generators/QueriesGen.cs +++ b/CodeGenerator/Generators/QueriesGen.cs @@ -146,7 +146,7 @@ private IEnumerable GetMembersForSingleQuery(Query quer } - [GeneratedRegex(@"\s{2,}")] + [GeneratedRegex(@"\s{1,}")] private static partial Regex LongWhitespaceRegex(); private MemberDeclarationSyntax AddMethodDeclaration(Query query) diff --git a/Drivers/Generators/ExecDeclareGen.cs b/Drivers/Generators/ExecDeclareGen.cs index d4e7cf13..d2eee37c 100644 --- a/Drivers/Generators/ExecDeclareGen.cs +++ b/Drivers/Generators/ExecDeclareGen.cs @@ -49,9 +49,7 @@ private string GetDapperNoTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? $", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" using ({{establishConnection}}) - { await {{Variable.Connection.AsVarName()}}.ExecuteAsync({{sqlVar}}{{dapperArgs}}); - } return; """; } @@ -62,9 +60,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? 
$", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.{{transactionProperty}}.Connection.ExecuteAsync( {{sqlVar}}{{dapperArgs}}, @@ -99,9 +95,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { diff --git a/Drivers/Generators/ExecLastIdDeclareGen.cs b/Drivers/Generators/ExecLastIdDeclareGen.cs index f5e8beda..ab086d4b 100644 --- a/Drivers/Generators/ExecLastIdDeclareGen.cs +++ b/Drivers/Generators/ExecLastIdDeclareGen.cs @@ -49,9 +49,7 @@ private string GetDapperNoTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? $", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" using ({{establishConnection}}) - { return await {{Variable.Connection.AsVarName()}}.QuerySingleAsync<{{dbDriver.GetIdColumnType(query)}}>({{sqlVar}}{{dapperArgs}}); - } """; } @@ -61,10 +59,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? 
$", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.{{transactionProperty}}.Connection.QuerySingleAsync<{{dbDriver.GetIdColumnType(query)}}>({{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}}); """; } @@ -97,9 +92,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { diff --git a/Drivers/Generators/ExecRowsDeclareGen.cs b/Drivers/Generators/ExecRowsDeclareGen.cs index fde479c9..21ab264e 100644 --- a/Drivers/Generators/ExecRowsDeclareGen.cs +++ b/Drivers/Generators/ExecRowsDeclareGen.cs @@ -49,9 +49,7 @@ private string GetDapperNoTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? $", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" using ({{establishConnection}}) - { return await {{Variable.Connection.AsVarName()}}.ExecuteAsync({{sqlVar}}{{dapperArgs}}); - } """; } @@ -61,9 +59,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var dapperArgs = query.Params.Any() ? 
$", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.{{transactionProperty}}.Connection.ExecuteAsync( {{sqlVar}}{{dapperArgs}}, @@ -97,9 +93,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) return $$""" if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index 5699cfba..fe1dbce0 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -183,23 +183,23 @@ public async Task TestMySqlDateTimeTypes( DateTime? cDate, DateTime? 
cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysqlDatetimeTypesRow y) { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -504,7 +504,7 @@ public async Task TestDateTimeCopyFrom( DateTime? cTimestamp) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, @@ -512,8 +512,8 @@ public async Task TestDateTimeCopyFrom( CTimestamp = cTimestamp }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { Cnt = batchSize, CYear = cYear, @@ -521,10 +521,10 @@ public async Task TestDateTimeCopyFrom( CDatetime = cDatetime, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMysqlDatetimeTypesCntRow y) { Assert.That(x.Cnt, 
Is.EqualTo(y.Cnt)); Assert.That(x.CYear, Is.EqualTo(y.CYear)); @@ -844,9 +844,13 @@ public async Task TestMySqlDataTypesOverride( await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, - CVarchar = cVarchar, + CVarchar = cVarchar + }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs + { CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlFunctionsRow { MaxInt = cInt, diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs index c2e8c6e3..177141dd 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs @@ -14,8 +14,9 @@ public partial class MySqlConnectorDapperTester public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); + await QuerySql.TruncateExtendedBios(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); - await QuerySql.TruncateExtendedBios(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index 8c8dc7c8..c384a500 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -467,16 +467,16 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1970-1-1 00:00:01")] public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? 
cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysqlDatetimeTypesRow y) { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -548,7 +548,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { MaxInt = cInt, @@ -756,9 +757,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null)] public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? 
cTimestamp) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { Cnt = batchSize, CYear = cYear, @@ -766,9 +767,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CDatetime = cDatetime, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMysqlDatetimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CYear, Is.EqualTo(y.CYear)); diff --git a/end2end/EndToEndTests/MySqlConnectorTester.cs b/end2end/EndToEndTests/MySqlConnectorTester.cs index 6c112bc8..9d580ca1 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.cs @@ -14,8 +14,9 @@ public partial class MySqlConnectorTester public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); + await QuerySql.TruncateExtendedBios(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); - await QuerySql.TruncateExtendedBios(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 
c610480c..1d5f4e37 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -467,16 +467,16 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1970-1-1 00:00:01")] public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysqlDatetimeTypesRow y) { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -548,7 +548,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { MaxInt = cInt, @@ -756,9 +757,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null)] public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { Cnt = batchSize, CYear = cYear, @@ -766,9 +767,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CDatetime = cDatetime, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMysqlDatetimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CYear, Is.EqualTo(y.CYear)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs index da6ef514..ff474438 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs @@ -14,9 +14,10 @@ public partial class MySqlConnectorDapperTester public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); + await QuerySql.TruncateExtendedBios(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); - await QuerySql.TruncateExtendedBios(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 60e28e85..1d015348 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -467,16 +467,16 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1970-1-1 00:00:01")] public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? 
cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysqlDatetimeTypesRow y) { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -548,7 +548,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { MaxInt = cInt, @@ -756,9 +757,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null)] public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? 
cTimestamp) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { Cnt = batchSize, CYear = cYear, @@ -766,9 +767,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CDatetime = cDatetime, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMysqlDatetimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CYear, Is.EqualTo(y.CYear)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs index e39c25dc..9573c794 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs @@ -14,9 +14,10 @@ public partial class MySqlConnectorTester public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); + await QuerySql.TruncateExtendedBios(); await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); - await QuerySql.TruncateExtendedBios(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs 
b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index 4c6c66c5..179fd1c5 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -467,16 +467,16 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1970-1-1 00:00:01")] public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysqlDatetimeTypesRow y) { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -548,7 +548,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { MaxInt = cInt, @@ -756,9 +757,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null)] public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { Cnt = batchSize, CYear = cYear, @@ -766,9 +767,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? 
cD CDatetime = cDatetime, CTimestamp = cTimestamp }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMysqlDatetimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CYear, Is.EqualTo(y.CYear)); diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index 464f9d50..c45114c7 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -22,11 +22,6 @@ public class MysqlType public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } - public short? CYear { get; init; } - public DateTime? CDate { get; init; } - public string? CTime { get; init; } - public DateTime? CDatetime { get; init; } - public DateTime? CTimestamp { get; init; } public string? CChar { get; init; } public string? CNchar { get; init; } public string? CNationalChar { get; init; } @@ -40,6 +35,14 @@ public class MysqlType public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } }; +public class MysqlDatetimeType +{ + public short? CYear { get; init; } + public DateTime? CDate { get; init; } + public string? CTime { get; init; } + public DateTime? CDatetime { get; init; } + public DateTime? CTimestamp { get; init; } +}; public class MysqlBinaryType { public byte? 
CBit { get; init; } diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index ac0f766f..64e53aec 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -76,7 +76,7 @@ public class GetAuthorArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public required long Id { get; init; } @@ -123,18 +123,12 @@ public async Task CreateAuthor(CreateAuthorArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(CreateAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -152,16 +146,11 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await 
this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } @@ -197,7 +186,7 @@ public class GetAuthorByIdArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public required long Id { get; init; } @@ -226,7 +215,7 @@ public async Task> GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public required string Name { get; init; } @@ -238,18 +227,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -259,22 +242,16 @@ public async Task DeleteAllAuthors() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string? Bio { get; init; } @@ -286,16 +263,11 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } @@ -381,16 +353,11 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } @@ -436,7 +403,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string 
GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public required Author? Author { get; init; } @@ -478,7 +445,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public required long Id { get; init; } @@ -546,18 +513,12 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } @@ -600,22 +561,16 @@ public async Task TruncateExtendedBios() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await 
connection.ExecuteAsync(TruncateExtendedBiosSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; public class InsertMysqlTypesArgs { public bool? 
CBool { get; init; } @@ -645,10 +600,6 @@ public class InsertMysqlTypesArgs public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } - public short? CYear { get; init; } - public DateTime? CDate { get; init; } - public DateTime? CDatetime { get; init; } - public DateTime? CTimestamp { get; init; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { @@ -680,25 +631,15 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_json_string_override", args.CJsonStringOverride); queryParams.Add("c_enum", args.CEnum); queryParams.Add("c_set", args.CSet != null ? string.Join(",", args.CSet) : null); - queryParams.Add("c_year", args.CYear); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_datetime", args.CDatetime); - queryParams.Add("c_timestamp", args.CTimestamp); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); } @@ -731,10 +672,6 @@ public class InsertMysqlTypesBatchArgs public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } - public short? CYear { get; init; } - public DateTime? CDate { get; init; } - public DateTime? CDatetime { get; init; } - public DateTime? 
CTimestamp { get; init; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -769,7 +706,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -787,13 +723,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, 
c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; init; } @@ -811,11 +747,6 @@ public class GetMysqlTypesRow public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } - public short? CYear { get; init; } - public DateTime? CDate { get; init; } - public string? CTime { get; init; } - public DateTime? CDatetime { get; init; } - public DateTime? CTimestamp { get; init; } public string? CChar { get; init; } public string? CNchar { get; init; } public string? CNationalChar { get; init; } @@ -848,7 +779,7 @@ public class GetMysqlTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, 
c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; public class GetMysqlTypesCntRow { public required long Cnt { get; init; } @@ -879,10 +810,6 @@ public class GetMysqlTypesCntRow public string? CJsonStringOverride { get; init; } public MysqlTypesCEnum? CEnum { get; init; } public HashSet? CSet { get; init; } - public short? CYear { get; init; } - public DateTime? CDate { get; init; } - public DateTime? CDatetime { get; init; } - public DateTime? CTimestamp { get; init; } }; public async Task GetMysqlTypesCnt() { @@ -903,20 +830,117 @@ public class GetMysqlTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public class GetMysqlFunctionsRow + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() { - public int? MaxInt { get; init; } - public string? 
MaxVarchar { get; init; } - public required DateTime MaxTimestamp { get; init; } + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + public class InsertMysqlDatetimeTypesArgs + { + public short? CYear { get; init; } + public DateTime? CDate { get; init; } + public DateTime? CDatetime { get; init; } + public DateTime? CTimestamp { get; init; } }; - public async Task GetMysqlFunctions() + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_year", args.CYear); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_datetime", args.CDatetime); + queryParams.Add("c_timestamp", args.CTimestamp); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlDatetimeTypesBatchArgs + { + public short? CYear { get; init; } + public DateTime? 
CDate { get; init; } + public DateTime? CDatetime { get; init; } + public DateTime? CTimestamp { get; init; } + }; + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + public class GetMysqlDatetimeTypesRow + { + public short? CYear { get; init; } + public DateTime? CDate { get; init; } + public string? CTime { get; init; } + public DateTime? CDatetime { get; init; } + public DateTime? 
CTimestamp { get; init; } + }; + public async Task GetMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql); return result; } } @@ -926,20 +950,27 @@ public class GetMysqlFunctionsRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + public class GetMysqlDatetimeTypesCntRow + { + public required long Cnt { get; init; } + public short? CYear { get; init; } + public DateTime? CDate { get; init; } + public DateTime? CDatetime { get; init; } + public DateTime? 
CTimestamp { get; init; } + }; + public async Task GetMysqlDatetimeTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncateMysqlTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -947,10 +978,25 @@ public async Task TruncateMysqlTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public async Task TruncateMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, 
@c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; public class InsertMysqlBinaryTypesArgs { public byte? CBit { get; init; } @@ -974,18 +1020,12 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); } @@ -1077,7 +1117,7 @@ public class GetMysqlBinaryTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; public class GetMysqlBinaryTypesCntRow { public required long Cnt { get; init; } @@ -1114,18 +1154,38 @@ public async Task TruncateMysqlBinaryTypes() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); - } - return; } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but 
its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + public class GetMysqlFunctionsRow + { + public int? MaxInt { get; init; } + public string? MaxVarchar { get; init; } + public required DateTime MaxTimestamp { get; init; } + }; + public async Task GetMysqlFunctions() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); + return result; + } + } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 0054e5a0..42c5e321 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -181,173 +181,180 @@ } }, { - "name": "c_year", + "name": "c_char", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "year" + "name": "char" } }, { - "name": "c_date", + "name": "c_nchar", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "date" + "name": "char" } }, { - "name": "c_time", - "length": 10, + "name": "c_national_char", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "time" + "name": "char" } }, { - 
"name": "c_datetime", - "length": 19, + "name": "c_varchar", + "length": 100, "table": { "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "varchar" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_tinytext", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "tinytext" } }, { - "name": "c_char", + "name": "c_mediumtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumtext" } }, { - "name": "c_nchar", + "name": "c_text", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "text" } }, { - "name": "c_national_char", + "name": "c_longtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "longtext" } }, { - "name": "c_varchar", - "length": 100, + "name": "c_json", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "json" } }, { - "name": "c_tinytext", + "name": "c_json_string_override", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "json" } }, { - "name": "c_mediumtext", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "mysql_types_c_enum" } }, { - "name": "c_text", - "length": -1, + "name": "c_set", + "length": 15, "table": { "name": "mysql_types" }, "type": { - "name": "text" + "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_datetime_types" + }, + "columns": [ { - "name": "c_longtext", + "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "longtext" + "name": "year" } }, { - "name": "c_json", + "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "date" } }, { - "name": "c_json_string_override", - "length": -1, + "name": 
"c_time", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "time" } }, { - "name": "c_enum", - "length": 6, + "name": "c_datetime", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "datetime" } }, { - "name": "c_set", - "length": 15, + "name": "c_timestamp", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_set" + "name": "timestamp" } } ] @@ -1466,7 +1473,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ @@ -1874,66 +1881,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - 
"column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "comments": [ @@ -1945,7 +1892,7 @@ } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -2353,66 +2300,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } 
- }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "filename": "query.sql", @@ -2421,7 +2308,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -2590,61 +2477,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - 
"name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -2781,7 +2613,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n 
c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -3090,12 +2922,171 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "comments": [ + " Datetime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + 
"number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ { "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "year" @@ -3106,18 +3097,29 @@ "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "date" }, "originalName": "c_date" }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + }, { "name": "c_datetime", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "datetime" @@ -3128,7 +3130,7 @@ "name": "c_timestamp", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "timestamp" @@ -3139,43 +3141,69 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n 
c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_year", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "year" + }, + "originalName": "c_year" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_date", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -3598,6 +3626,44 @@ "name": "TruncateMysqlBinaryTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + 
"length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 9ef230d7bd3bceea1e09bcfc76ea6ac946482519..869ab2f07f9ddf131653c452e4cf9cf49121e564 100644 GIT binary patch delta 1876 zcmchX-A~g{7{=3fE8`3zv}2Ga(6eb&8iYBOM3yXUV~kF=nXdQ|Q3A6UBrsqbiNlNO z9T&!EPK-p2iFXEL#GGJ^iT7TK@y5g({{w}XsKFR*SBA5+NaDw?n)Q93_q@O7>3R2< zSbj<@eI#4HxfptSc5Gri<3()ckz23DPHLG%eR~_wE4mp!qj|ElKms@Cnk?pLCPvPW z7L>yHq%u+{PAhrl5TG+7MP)SS`(F7Gl11?t&Dpe4y-5?=SyRcT)iQ!MN*nD1+fC3a z7(W1+&7=BQ%X+{$Dhn}(fm7Gd7e*(itC~EUO8U}cPOes)Qm*6XDd0l)j=Auf;KC+0fb+1$SVRVI5%j)clELfp&Hn>FKlf5?yye~XNYcEhP-e>zlfjnB{*~ZE` z`cWa+kNDoB=s`zQshw#Nvw0;~NN7g(ZTKi_HgfhJ<${tCVh)0dm^s?E84%MLRGgjL z2!kp_>O$IabeDN8AB1G7gkBD~JB7G|V)_l{S-1ECZCygbVPiz2ch)`gf@in^R@R4Q z(1+J5?}Yi}Z5uDUcJB>8K<_F)62@+4oSz+uhEzn(#)~lkU>L>)qmu=t)fwDm_ zDt7q2mcT$vN(~){;nV0tDAd?uq=wGOU70_`wNqMum4#T%A`N!S(oixIlA}-#g$JXi z*Ag#3CUr zF1F-$vW<$lev{tCVx=W-K?)v%+97|#~~wRHMziBWb$5h@yW~F z1$gZ^WQDBYUfHOwtjs3G0&-Cm)O%Wv9CAXosGgYoQAJ|1v4@0+3x~Xr1DdPBl0iO_ z97enfPKm{t$&(kl2~Xau$oY zpYqV%%x?0J85YNrSGx0XDH*{71{lW@$f2U7i5l0FZ+P-bn4*RfJgO(_c*sd9S)hdz zLfYF$Ve^0UhfJ(rMY#^LPxcR0-ppa$3koFH%?oY1!J)NT*Di#ONy!KtdB8-7#eu3Q l4kX_(IxKxmT-B2gL{v|Xj%=IU=n}B`jL%!{%|9b;m;fqx1iAnK diff --git a/examples/MySqlConnectorDapperLegacyExample/Models.cs b/examples/MySqlConnectorDapperLegacyExample/Models.cs index 794514ec..4f11fdb5 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Models.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Models.cs @@ -23,11 +23,6 @@ public class MysqlType public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public string CTime { get; set; } - public DateTime? 
CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } public string CChar { get; set; } public string CNchar { get; set; } public string CNationalChar { get; set; } @@ -41,6 +36,14 @@ public class MysqlType public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } }; + public class MysqlDatetimeType + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + }; public class MysqlBinaryType { public byte? CBit { get; set; } diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index c0eb8e62..3058db1a 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -45,7 +45,7 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, 
c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; public class InsertMysqlTypesArgs { public bool? CBool { get; set; } @@ -75,10 +75,6 @@ public class InsertMysqlTypesArgs public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { @@ -110,25 +106,15 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_json_string_override", args.CJsonStringOverride); queryParams.Add("c_enum", args.CEnum); queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); - queryParams.Add("c_year", args.CYear); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_datetime", args.CDatetime); - queryParams.Add("c_timestamp", args.CTimestamp); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); } @@ -161,10 +147,6 @@ public class InsertMysqlTypesBatchArgs public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? 
CTimestamp { get; set; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -198,7 +180,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -216,13 +197,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, 
c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; set; } @@ -240,11 +221,6 @@ public class GetMysqlTypesRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public string CTime { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } public string CChar { get; set; } public string CNchar { get; set; } public string CNationalChar { get; set; } @@ -277,7 +253,7 @@ public async Task GetMysqlTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, 
c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; public class GetMysqlTypesCntRow { public long Cnt { get; set; } @@ -308,10 +284,6 @@ public class GetMysqlTypesCntRow public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } }; public async Task GetMysqlTypesCnt() { @@ -332,20 +304,117 @@ public async Task GetMysqlTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public class GetMysqlFunctionsRow + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() { - public int? 
MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + public class InsertMysqlDatetimeTypesArgs + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } }; - public async Task GetMysqlFunctions() + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_year", args.CYear); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_datetime", args.CDatetime); + queryParams.Add("c_timestamp", args.CTimestamp); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlDatetimeTypesBatchArgs + { + public short? CYear { get; set; } + public DateTime? 
CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + }; + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + public class GetMysqlDatetimeTypesRow + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + }; + public async Task GetMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql); return result; } } @@ -355,20 +424,27 @@ public async Task GetMysqlFunctions() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + public class GetMysqlDatetimeTypesCntRow + { + public long Cnt { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + }; + public async Task GetMysqlDatetimeTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.ExecuteAsync(TruncateMysqlTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql); + return result; } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -376,10 +452,25 @@ public async Task TruncateMysqlTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public async Task TruncateMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, 
@c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; public class InsertMysqlBinaryTypesArgs { public byte? CBit { get; set; } @@ -403,18 +494,12 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); } @@ -505,7 +590,7 @@ public async Task GetMysqlBinaryTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; public class GetMysqlBinaryTypesCntRow { public long Cnt { get; set; } @@ -542,19 +627,39 @@ public async Task TruncateMysqlBinaryTypes() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); - } - return; } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new System.InvalidOperationException("Transaction is provided, but its connection 
is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + public class GetMysqlFunctionsRow + { + public int? MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } + }; + public async Task GetMysqlFunctions() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); + return result; + } + } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; @@ -589,7 +694,7 @@ public async Task GetAuthor(GetAuthorArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public long Id { get; set; } @@ -636,18 +741,12 @@ public async Task CreateAuthor(CreateAuthorArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await 
connection.ExecuteAsync(CreateAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -665,16 +764,11 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } @@ -710,7 +804,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public long Id { get; set; } @@ -739,7 +833,7 @@ public async Task> GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string 
DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -751,18 +845,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -772,22 +860,16 @@ public async Task DeleteAllAuthors() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -799,16 +881,11 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is 
null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } @@ -894,16 +971,11 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } @@ -949,7 +1021,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -991,7 +1063,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public long Id { get; set; } @@ -1059,18 +1131,12 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } @@ -1113,18 +1179,12 @@ public async Task TruncateExtendedBios() if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncateExtendedBiosSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } } diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index f2f022c1..fb2c6f5f 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -181,173 +181,180 @@ } }, { - "name": "c_year", + "name": "c_char", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "year" + "name": "char" } }, { - "name": "c_date", + "name": "c_nchar", 
"length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "date" + "name": "char" } }, { - "name": "c_time", - "length": 10, + "name": "c_national_char", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "time" + "name": "char" } }, { - "name": "c_datetime", - "length": 19, + "name": "c_varchar", + "length": 100, "table": { "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "varchar" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_tinytext", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "tinytext" } }, { - "name": "c_char", + "name": "c_mediumtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumtext" } }, { - "name": "c_nchar", + "name": "c_text", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "text" } }, { - "name": "c_national_char", + "name": "c_longtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "longtext" } }, { - "name": "c_varchar", - "length": 100, + "name": "c_json", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "json" } }, { - "name": "c_tinytext", + "name": "c_json_string_override", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "json" } }, { - "name": "c_mediumtext", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "mysql_types_c_enum" } }, { - "name": "c_text", - "length": -1, + "name": "c_set", + "length": 15, "table": { "name": "mysql_types" }, "type": { - "name": "text" + "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_datetime_types" + }, + "columns": [ { - "name": "c_longtext", + "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": 
"longtext" + "name": "year" } }, { - "name": "c_json", + "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "date" } }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_time", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "time" } }, { - "name": "c_enum", - "length": 6, + "name": "c_datetime", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "datetime" } }, { - "name": "c_set", - "length": 15, + "name": "c_timestamp", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_set" + "name": "timestamp" } } ] @@ -615,7 +622,7 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ @@ -1023,66 +1030,6 @@ }, 
"originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "comments": [ @@ -1094,7 +1041,7 @@ } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": 
[ @@ -1502,66 +1449,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "filename": "query.sql", @@ -1570,7 +1457,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -1739,61 +1626,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - 
"table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -1930,7 +1762,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n 
c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -2239,12 +2071,171 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "comments": [ + " Datetime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypesBatch", + "cmd": 
":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ { "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "year" @@ -2255,18 +2246,29 @@ "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "date" }, "originalName": "c_date" }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + }, { "name": "c_datetime", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "datetime" @@ -2277,7 +2279,7 @@ "name": "c_timestamp", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "timestamp" @@ -2288,43 +2290,69 @@ "filename": "query.sql" }, { - 
"text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_year", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "year" + }, + "originalName": "c_year" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_date", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -2748,6 +2776,44 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + 
"name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" + }, { "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", "name": "GetAuthor", diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index 4bef43f4..54a1a1c2 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.message +++ b/examples/MySqlConnectorDapperLegacyExample/request.message @@ -2,7 +2,7 @@ Г 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╘ *examples/MySqlConnectorDapperLegacyExamplecsharpЗ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorDapperLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner╡public"╦public═ +./dist/LocalRunner¤public"Уpublic·  mysql_types$ c_bool0R  mysql_typesb tinyint' c_boolean0R  mysql_typesb tinyint' @@ -25,14 +25,6 @@ R  mysql_typesb decimal% R  mysql_typesb decimal. c_double0         R  mysql_typesbdouble8 c_double_precision0         R  mysql_typesbdouble* -c_year0         R  mysql_typesbyear* -c_date0         R  mysql_typesbdate! 
-c_time0 -R  mysql_typesbtime) - -c_datetime0R  mysql_typesb -datetime+ - c_timestamp0R  mysql_typesb  timestamp* c_char0         R  mysql_typesbchar+ c_nchar0         R  mysql_typesbchar3 c_national_char0         R  mysql_typesbchar' @@ -49,7 +41,16 @@ c_longtext0 c_json0         R  mysql_typesbjson: c_json_string_override0         R  mysql_typesbjson/ c_enum0R  mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_setИ +c_set0R  mysql_typesbmysql_types_c_setШ +mysql_datetime_types3 +c_year0         Rmysql_datetime_typesbyear3 +c_date0         Rmysql_datetime_typesbdate* +c_time0 +Rmysql_datetime_typesbtime2 + +c_datetime0Rmysql_datetime_typesb +datetime4 + c_timestamp0Rmysql_datetime_typesb  timestampИ mysql_binary_types& c_bit0Rmysql_binary_typesbbit, c_binary0Rmysql_binary_typesbbinary2 @@ -82,8 +83,8 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_typeу -╝ + author_type0Rextendedbiosbbios_author_typeХ +ў INSERT INTO mysql_types ( c_bool, @@ -106,13 +107,9 @@ INSERT INTO mysql_types c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 c_bool0Rpublic mysql_typesb tinyintzc_bool*>: c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF @@ -154,15 +151,8 @@ c_longtext*>: c_json0         Rpublic mysql_typesbjsonzc_json*^Z c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= - -c_datetime0Rpublic mysql_typesb -datetimez -c_datetime*D@ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp2 Basic types : query.sqlB  mysql_types▄ -╗INSERT INTO mysql_types +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set2 Basic types : query.sqlB  mysql_typesО +ЎINSERT INTO mysql_types ( c_bool, c_boolean, @@ -184,13 +174,9 @@ c_datetime*D@ c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 c_bool0Rpublic mysql_typesb tinyintzc_bool*>: c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF @@ -232,15 +218,8 @@ c_longtext*>: c_json0         Rpublic mysql_typesbjsonzc_json*^Z c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= - -c_datetime0Rpublic mysql_typesb -datetimez -c_datetime*D@ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp: query.sqlB  mysql_types╡ -∙SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set: query.sqlB  mysql_typesА +╚SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", c_bool0R  mysql_typesb tinyintzc_bool"2 c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> @@ -263,15 +242,6 @@ R  mysql_typesb decimalz c_numeric". 
R  mysql_typesb decimalzc_fixed"8 c_double0         R  mysql_typesbdoublezc_double"L c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_year0         R  mysql_typesbyearzc_year"2 -c_date0         R  mysql_typesbdatezc_date") -c_time0 -R  mysql_typesbtimezc_time"5 - -c_datetime0R  mysql_typesb -datetimez -c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp"2 c_char0         R  mysql_typesbcharzc_char"4 c_nchar0         R  mysql_typesbcharzc_nchar"D c_national_char0         R  mysql_typesbcharzc_national_char"2 @@ -290,8 +260,8 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlї -┴SELECT +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlк +╧SELECT COUNT(*) AS cnt, c_bool, c_boolean, @@ -319,11 +289,7 @@ c_longtext"2 c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set FROM mysql_types GROUP BY c_bool, @@ -346,11 +312,7 @@ GROUP BY c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set LIMIT 1GetMysqlTypesCnt:one" cnt0         @bbigint", c_bool0R  mysql_typesb tinyintzc_bool"2 @@ -393,23 +355,71 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set"2 -c_year0         R  mysql_typesbyearzc_year"2 -c_date0         R  mysql_typesbdatezc_date"5 +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql° +s +INSERT INTO mysql_datetime_types +( + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC +c_year0         
Rpublicmysql_datetime_typesbyearzc_year*GC +c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF -c_datetime0R  mysql_typesb +c_datetime0Rpublicmysql_datetime_typesb datetimez -c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp: query.sqlО -{SELECT - MAX(c_int) AS max_int, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM mysql_typesGetMysqlFunctions:one" -max_int0         @bany"# - max_varchar0         @bany"% - max_timestamp0         @bany: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlУ +c_datetime*MI + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp2 Datetime types : query.sqlBmysql_datetime_typesю +rINSERT INTO mysql_datetime_types +( + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC +c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC +c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF + +c_datetime0Rpublicmysql_datetime_typesb +datetimez +c_datetime*MI + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp: query.sqlBmysql_datetime_types│ +XSELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"2 +c_time0 +Rmysql_datetime_typesbtimezc_time"> + +c_datetime0Rmysql_datetime_typesb +datetimez +c_datetime"A + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: query.sqlБ +╢SELECT + COUNT(*) AS cnt, + c_year, + c_date, + c_datetime, + c_timestamp +FROM mysql_datetime_types +GROUP BY + c_year, + c_date, + c_datetime, + c_timestamp +LIMIT 1GetMysqlDatetimeTypesCnt:one" +cnt0         @bbigint"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"> + +c_datetime0Rmysql_datetime_typesb +datetimez +c_datetime"A + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: 
query.sqlS +#TRUNCATE TABLE mysql_datetime_typesTruncateMysqlDatetimeTypes:exec: query.sqlУ о INSERT INTO mysql_binary_types ( @@ -514,7 +524,17 @@ mediumblobz c_mediumblob"E c_longblob0         Rmysql_binary_typesb longblobz c_longblob: query.sqlO -!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlМ +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sql╜ +Ь +SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_types +CROSS JOIN mysql_datetime_typesGetMysqlFunctions:one" +max_int0         @bany"# + max_varchar0         @bany"% + max_timestamp0         @bany2 Functions : query.sqlМ 8SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1 GetAuthor:one"* id0         R authorsbbigintzid", name0         R authorsbtextzname"( diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index d8b3d712..4a6cea9b 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -5,7 +5,8 @@ using System.Text.Json; namespace MySqlConnectorExampleGen; -public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); +public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? 
CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); +public readonly record struct MysqlDatetimeType(short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp); public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index 4deb21ec..ec6bb716 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -99,7 +99,7 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public readonly record struct ListAuthorsRow(long Id, string Name, string? 
Bio); public readonly record struct ListAuthorsArgs(int Limit, int Offset); public async Task> ListAuthors(ListAuthorsArgs args) @@ -164,10 +164,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -199,10 +196,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -272,7 +266,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); public readonly record struct GetAuthorByNamePatternArgs(string? 
NamePattern); public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) @@ -313,7 +307,7 @@ public async Task> GetAuthorByNamePattern(GetAut } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public readonly record struct DeleteAuthorArgs(string Name); public async Task DeleteAuthor(DeleteAuthorArgs args) { @@ -333,10 +327,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -364,10 +355,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllAuthorsSql; @@ -376,7 +364,7 @@ public async Task DeleteAllAuthors() } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public readonly record struct UpdateAuthorsArgs(string? 
Bio); public async Task UpdateAuthors(UpdateAuthorsArgs args) { @@ -394,10 +382,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; @@ -522,10 +507,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -575,7 +557,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? Author2); public async Task> GetDuplicateAuthors() { @@ -613,7 +595,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? Book); public readonly record struct GetAuthorsByBookNameArgs(string Name); public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) @@ -677,10 +659,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateExtendedBioSql; @@ -771,10 +750,7 @@ public async Task TruncateExtendedBios() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncateExtendedBiosSql; @@ -783,8 +759,8 @@ public async Task TruncateExtendedBios() } } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) 
VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; - public readonly record struct InsertMysqlTypesArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public readonly record struct InsertMysqlTypesArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? 
CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { if (this.Transaction == null) @@ -821,10 +797,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -833,10 +805,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertMysqlTypesSql; @@ -868,15 +837,11 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_set", args.CSet != null ? 
string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public readonly record struct InsertMysqlTypesBatchArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + public readonly record struct InsertMysqlTypesBatchArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? 
CSet); public async Task InsertMysqlTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; @@ -910,7 +875,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -928,14 +892,14 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; - public readonly record struct 
GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; + public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); public async Task GetMysqlTypes() { if (this.Transaction == null) @@ -966,23 +930,18 @@ public async Task InsertMysqlTypesBatch(List args) CFixed = reader.IsDBNull(12) ? null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? null : reader.GetDouble(13), CDoublePrecision = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? 
null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), + CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), + CText = reader.IsDBNull(21) ? null : reader.GetString(21), + CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CJson = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), + CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), + CEnum = reader.IsDBNull(25) ? null : reader.GetString(25).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() }; } } @@ -1022,23 +981,18 @@ public async Task InsertMysqlTypesBatch(List args) CFixed = reader.IsDBNull(12) ? 
null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? null : reader.GetDouble(13), CDoublePrecision = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), + CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), + CText = reader.IsDBNull(21) ? null : reader.GetString(21), + CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CJson = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), + CJsonStringOverride = reader.IsDBNull(24) ? 
null : reader.GetString(24), + CEnum = reader.IsDBNull(25) ? null : reader.GetString(25).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() }; } } @@ -1047,8 +1001,8 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; - public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? 
CTimestamp); + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); public async Task GetMysqlTypesCnt() { if (this.Transaction == null) @@ -1091,11 +1045,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), CEnum = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(28) ? null : reader.GetInt16(28), - CDate = reader.IsDBNull(29) ? null : reader.GetDateTime(29), - CDatetime = reader.IsDBNull(30) ? null : reader.GetDateTime(30), - CTimestamp = reader.IsDBNull(31) ? 
null : reader.GetDateTime(31) + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() }; } } @@ -1147,11 +1097,7 @@ public async Task InsertMysqlTypesBatch(List args) CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), CEnum = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(28) ? null : reader.GetInt16(28), - CDate = reader.IsDBNull(29) ? null : reader.GetDateTime(29), - CDatetime = reader.IsDBNull(30) ? null : reader.GetDateTime(30), - CTimestamp = reader.IsDBNull(31) ? null : reader.GetDateTime(31) + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() }; } } @@ -1160,26 +1106,138 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public readonly record struct GetMysqlFunctionsRow(int? MaxInt, string? 
MaxVarchar, DateTime MaxTimestamp); - public async Task GetMysqlFunctions() + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + public readonly record struct InsertMysqlDatetimeTypesArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(InsertMysqlDatetimeTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlDatetimeTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public readonly record struct InsertMysqlDatetimeTypesBatchArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new 
MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + public readonly record struct GetMysqlDatetimeTypesRow(short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp); + public async Task GetMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlDatetimeTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlDatetimeTypesRow { - MaxInt = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CYear = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? null : reader.GetDateTime(1), + CTime = reader.IsDBNull(2) ? null : reader.GetString(2), + CDatetime = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? 
null : reader.GetDateTime(4) }; } } @@ -1196,17 +1254,19 @@ public async Task InsertMysqlTypesBatch(List args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlFunctionsSql; + command.CommandText = GetMysqlDatetimeTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlDatetimeTypesRow { - MaxInt = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CYear = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? null : reader.GetDateTime(1), + CTime = reader.IsDBNull(2) ? null : reader.GetString(2), + CDatetime = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? null : reader.GetDateTime(4) }; } } @@ -1215,21 +1275,35 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + public readonly record struct GetMysqlDatetimeTypesCntRow(long Cnt, short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? 
CTimestamp); + public async Task GetMysqlDatetimeTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + using (var command = new MySqlCommand(GetMysqlDatetimeTypesCntSql, connection)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? null : reader.GetDateTime(4) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -1239,13 +1313,55 @@ public async Task TruncateMysqlTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlTypesSql; + command.CommandText = GetMysqlDatetimeTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? 
null : reader.GetDateTime(4) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public async Task TruncateMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlDatetimeTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlDatetimeTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( @c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; public readonly record struct InsertMysqlBinaryTypesArgs(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) { @@ -1271,10 +1387,7 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertMysqlBinaryTypesSql; @@ -1402,7 +1515,7 @@ public async Task InsertMysqlBinaryTypesBatch(List GetMysqlBinaryTypesCnt() { @@ -1484,6 +1597,45 @@ public async Task TruncateMysqlBinaryTypes() return; } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + public readonly record struct GetMysqlFunctionsRow(int? MaxInt, string? MaxVarchar, DateTime MaxTimestamp); + public async Task GetMysqlFunctions() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? 
null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } + } + } + + return null; + } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); @@ -1491,9 +1643,22 @@ public async Task TruncateMysqlBinaryTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlBinaryTypesSql; + command.CommandText = GetMysqlFunctionsSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } + + return null; } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index 2eb46a35..b3e5fb1a 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -181,173 +181,180 @@ } }, { - "name": "c_year", + "name": "c_char", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "year" + "name": "char" } }, { - "name": "c_date", + "name": "c_nchar", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "date" + "name": "char" } }, { - "name": "c_time", - "length": 10, + "name": "c_national_char", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "time" + "name": "char" } }, { - "name": "c_datetime", - "length": 19, + "name": "c_varchar", + "length": 100, "table": { "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "varchar" } }, { - "name": "c_timestamp", - "length": 
19, + "name": "c_tinytext", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "tinytext" } }, { - "name": "c_char", + "name": "c_mediumtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumtext" } }, { - "name": "c_nchar", + "name": "c_text", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "text" } }, { - "name": "c_national_char", + "name": "c_longtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "longtext" } }, { - "name": "c_varchar", - "length": 100, + "name": "c_json", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "json" } }, { - "name": "c_tinytext", + "name": "c_json_string_override", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "json" } }, { - "name": "c_mediumtext", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "mysql_types_c_enum" } }, { - "name": "c_text", - "length": -1, + "name": "c_set", + "length": 15, "table": { "name": "mysql_types" }, "type": { - "name": "text" + "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_datetime_types" + }, + "columns": [ { - "name": "c_longtext", + "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "longtext" + "name": "year" } }, { - "name": "c_json", + "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "date" } }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_time", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "time" } }, { - "name": "c_enum", - "length": 6, + "name": "c_datetime", + 
"length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "datetime" } }, { - "name": "c_set", - "length": 15, + "name": "c_timestamp", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_set" + "name": "timestamp" } } ] @@ -1466,7 +1473,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ @@ -1874,66 +1881,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - 
"name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "comments": [ @@ -1945,7 +1892,7 @@ } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -2353,66 +2300,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - 
"number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "filename": "query.sql", @@ -2421,7 +2308,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -2590,61 +2477,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - 
"name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -2781,7 +2613,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -3090,12 +2922,171 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + 
"text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "comments": [ + " Datetime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 3, + "column": 
{ + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ { "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "year" @@ -3106,18 +3097,29 @@ "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "date" }, "originalName": "c_date" }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + }, { "name": "c_datetime", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "datetime" @@ -3128,7 +3130,7 @@ "name": "c_timestamp", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "timestamp" @@ -3139,43 +3141,69 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, 
"isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_year", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "year" + }, + "originalName": "c_year" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_date", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -3598,6 +3626,44 @@ "name": "TruncateMysqlBinaryTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorExample/request.message 
b/examples/MySqlConnectorExample/request.message index 5283adaeade539646c904b44b2ddba222ed07d40..21a116ef0eeefc2d15e12c26b686e1727b42b52f 100644 GIT binary patch delta 1719 zcmX@Vk#Xq`#tko+B>svBu@#gikWFHfVyR3`ED|#2VoL_=H24n#j6rxb zfh-Zy0%-zSWx$0`El@>vs*n*E7sLjz24S2UxTH8>J`ghD;)YmIT#}euV1Qc_w-hJH zSg^9qXE{W~*`^9}eP`yHd`4^P<{AZ7#>w+_qBkol6|+yCtEI)-C(QMYdGi^qSrBnH z-5U1EYqX?U8u+_izi z`#4Nz+$`f1$~HOLD?sB1GgmQ}r(dvZP>6!3Ux>c~wwO@h(wLmCDkueJ|Qc1#>Bf1cmxJJBGL_ zgg83+xMGccA*qm}(!69~p@UNTOl}Ml-@MA}9XGqwUM8+Nj9inWJgcR+oP+#>gB85| fJ^j!On|$80hu4T(!3}6uNoIatvBKuwFefGetvOx< delta 718 zcmZ3wgYo=E#tko+B({nOu@#giTwKZVAZf6K@Z^sc!jdqqkTw@L zL`QK+Vs3$f@Z>;yVNQ@jFn{xURv#wj4rZ>&2mM7i>v8@TV|^^jwTET1p2BQKMxDv; zT>K{AS9G1E#IspNshFKd%Z@`v$O_~Lpd;(mG$zk>kWI$nE1&uW&Q?xJvMinqjL`{Z0N zrOkh=`XC` CSet { get; set; } }; + public class MysqlDatetimeType + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + }; public class MysqlBinaryType { public byte? 
CBit { get; set; } diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 78efe44f..305bc5ab 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -42,7 +42,7 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp ) VALUES ( @c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set, @c_year, @c_date, @c_datetime, @c_timestamp ) "; + private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; public class InsertMysqlTypesArgs { public 
bool? CBool { get; set; } @@ -72,10 +72,6 @@ public class InsertMysqlTypesArgs public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } }; public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) { @@ -113,10 +109,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -125,10 +117,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertMysqlTypesSql; @@ -160,10 +149,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -197,10 +182,6 @@ public class InsertMysqlTypesBatchArgs public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } }; public async Task InsertMysqlTypesBatch(List args) { @@ -234,7 +215,6 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -252,13 +232,13 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set", "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", 
"c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; + private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; public class GetMysqlTypesRow { public bool? CBool { get; set; } @@ -276,11 +256,6 @@ public class GetMysqlTypesRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public string CTime { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } public string CChar { get; set; } public string CNchar { get; set; } public string CNationalChar { get; set; } @@ -324,23 +299,18 @@ public async Task GetMysqlTypes() CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? 
(short? )null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? (DateTime? )null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), + CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), + CText = reader.IsDBNull(21) ? null : reader.GetString(21), + CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CJson = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), + CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), + CEnum = reader.IsDBNull(25) ? (MysqlTypesCEnum? 
)null : reader.GetString(25).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() }; } } @@ -380,23 +350,18 @@ public async Task GetMysqlTypes() CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CYear = reader.IsDBNull(15) ? (short? )null : reader.GetInt16(15), - CDate = reader.IsDBNull(16) ? (DateTime? )null : reader.GetDateTime(16), - CTime = reader.IsDBNull(17) ? null : reader.GetString(17), - CDatetime = reader.IsDBNull(18) ? (DateTime? )null : reader.GetDateTime(18), - CTimestamp = reader.IsDBNull(19) ? (DateTime? )null : reader.GetDateTime(19), - CChar = reader.IsDBNull(20) ? null : reader.GetString(20), - CNchar = reader.IsDBNull(21) ? null : reader.GetString(21), - CNationalChar = reader.IsDBNull(22) ? null : reader.GetString(22), - CVarchar = reader.IsDBNull(23) ? null : reader.GetString(23), - CTinytext = reader.IsDBNull(24) ? null : reader.GetString(24), - CMediumtext = reader.IsDBNull(25) ? null : reader.GetString(25), - CText = reader.IsDBNull(26) ? null : reader.GetString(26), - CLongtext = reader.IsDBNull(27) ? null : reader.GetString(27), - CJson = reader.IsDBNull(28) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(28)), - CJsonStringOverride = reader.IsDBNull(29) ? null : reader.GetString(29), - CEnum = reader.IsDBNull(30) ? (MysqlTypesCEnum? )null : reader.GetString(30).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(31) ? null : reader.GetString(31).ToMysqlTypesCSetSet() + CChar = reader.IsDBNull(15) ? null : reader.GetString(15), + CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), + CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), + CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), + CTinytext = reader.IsDBNull(19) ? 
null : reader.GetString(19), + CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), + CText = reader.IsDBNull(21) ? null : reader.GetString(21), + CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), + CJson = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), + CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), + CEnum = reader.IsDBNull(25) ? (MysqlTypesCEnum? )null : reader.GetString(25).ToMysqlTypesCEnum(), + CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() }; } } @@ -405,7 +370,7 @@ public async Task GetMysqlTypes() return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp FROM mysql_types GROUP BY c_bool , c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set, c_year, c_date, c_datetime, c_timestamp LIMIT 1 "; + private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, 
c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; public class GetMysqlTypesCntRow { public long Cnt { get; set; } @@ -436,10 +401,6 @@ public class GetMysqlTypesCntRow public string CJsonStringOverride { get; set; } public MysqlTypesCEnum? CEnum { get; set; } public HashSet CSet { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } }; public async Task GetMysqlTypesCnt() { @@ -483,11 +444,7 @@ public async Task GetMysqlTypesCnt() CJson = reader.IsDBNull(24) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(24)), CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(28) ? (short? )null : reader.GetInt16(28), - CDate = reader.IsDBNull(29) ? (DateTime? )null : reader.GetDateTime(29), - CDatetime = reader.IsDBNull(30) ? (DateTime? )null : reader.GetDateTime(30), - CTimestamp = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31) + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() }; } } @@ -539,11 +496,7 @@ public async Task GetMysqlTypesCnt() CJson = reader.IsDBNull(24) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(24)), CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet(), - CYear = reader.IsDBNull(28) ? (short? )null : reader.GetInt16(28), - CDate = reader.IsDBNull(29) ? (DateTime? )null : reader.GetDateTime(29), - CDatetime = reader.IsDBNull(30) ? 
(DateTime? )null : reader.GetDateTime(30), - CTimestamp = reader.IsDBNull(31) ? (DateTime? )null : reader.GetDateTime(31) + CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() }; } } @@ -552,31 +505,157 @@ public async Task GetMysqlTypesCnt() return null; } - private const string GetMysqlFunctionsSql = "SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types "; - public class GetMysqlFunctionsRow + private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; + public async Task TruncateMysqlTypes() { - public int? MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + public class InsertMysqlDatetimeTypesArgs + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } }; - public async Task GetMysqlFunctions() + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + using (var command = new MySqlCommand(InsertMysqlDatetimeTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlDatetimeTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public class InsertMysqlDatetimeTypesBatchArgs + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + }; + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + public class GetMysqlDatetimeTypesRow + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public string CTime { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? 
CTimestamp { get; set; } + }; + public async Task GetMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlDatetimeTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlDatetimeTypesRow { - MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), + CTime = reader.IsDBNull(2) ? null : reader.GetString(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) }; } } @@ -593,17 +672,19 @@ public async Task GetMysqlFunctions() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlFunctionsSql; + command.CommandText = GetMysqlDatetimeTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlDatetimeTypesRow { - MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), + CTime = reader.IsDBNull(2) ? null : reader.GetString(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? 
)null : reader.GetDateTime(4) }; } } @@ -612,21 +693,42 @@ public async Task GetMysqlFunctions() return null; } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + public class GetMysqlDatetimeTypesCntRow + { + public long Cnt { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + }; + public async Task GetMysqlDatetimeTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + using (var command = new MySqlCommand(GetMysqlDatetimeTypesCntSql, connection)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? 
)null : reader.GetDateTime(4) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) @@ -636,13 +738,55 @@ public async Task TruncateMysqlTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlTypesSql; + command.CommandText = GetMysqlDatetimeTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public async Task TruncateMysqlDatetimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlDatetimeTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlDatetimeTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES ( 
@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob ) "; + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; public class InsertMysqlBinaryTypesArgs { public byte? CBit { get; set; } @@ -677,10 +821,7 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertMysqlBinaryTypesSql; @@ -825,7 +966,7 @@ public async Task GetMysqlBinaryTypes() return null; } - private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit , c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1 "; + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; public class GetMysqlBinaryTypesCntRow { public long Cnt { get; set; } @@ -917,6 +1058,50 @@ public async Task TruncateMysqlBinaryTypes() return; } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + 
{ + command.CommandText = TruncateMysqlBinaryTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + public class GetMysqlFunctionsRow + { + public int? MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } + }; + public async Task GetMysqlFunctions() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } + } + } + + return null; + } + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); @@ -924,10 +1109,23 @@ public async Task TruncateMysqlBinaryTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlBinaryTypesSql; + command.CommandText = GetMysqlFunctionsSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlFunctionsRow + { + MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) + }; + } + } } + + return null; } private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; @@ -996,7 +1194,7 @@ public async Task GetAuthor(GetAuthorArgs args) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public long Id { get; set; } @@ -1075,10 +1273,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -1114,10 +1309,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1195,7 +1387,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public long Id { get; set; } @@ -1244,7 +1436,7 @@ public async 
Task> GetAuthorByNamePattern(GetAut } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -1267,10 +1459,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -1298,10 +1487,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllAuthorsSql; @@ -1310,7 +1496,7 @@ public async Task DeleteAllAuthors() } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -1331,10 +1517,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = 
UpdateAuthorsSql; @@ -1480,10 +1663,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -1537,7 +1717,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -1579,7 +1759,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public long Id { get; set; } @@ -1658,10 +1838,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateExtendedBioSql; @@ -1761,10 +1938,7 @@ public async Task TruncateExtendedBios() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncateExtendedBiosSql; diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index f6db1eaa..22eb9237 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -181,173 +181,180 @@ } }, { - "name": "c_year", + "name": "c_char", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "year" + "name": "char" } }, { - "name": "c_date", + "name": "c_nchar", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "date" + "name": "char" } }, { - "name": "c_time", - "length": 10, + "name": "c_national_char", + "length": 
-1, "table": { "name": "mysql_types" }, "type": { - "name": "time" + "name": "char" } }, { - "name": "c_datetime", - "length": 19, + "name": "c_varchar", + "length": 100, "table": { "name": "mysql_types" }, "type": { - "name": "datetime" + "name": "varchar" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_tinytext", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "timestamp" + "name": "tinytext" } }, { - "name": "c_char", + "name": "c_mediumtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "mediumtext" } }, { - "name": "c_nchar", + "name": "c_text", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "text" } }, { - "name": "c_national_char", + "name": "c_longtext", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "char" + "name": "longtext" } }, { - "name": "c_varchar", - "length": 100, + "name": "c_json", + "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "varchar" + "name": "json" } }, { - "name": "c_tinytext", + "name": "c_json_string_override", "length": -1, "table": { "name": "mysql_types" }, "type": { - "name": "tinytext" + "name": "json" } }, { - "name": "c_mediumtext", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "name": "mysql_types" }, "type": { - "name": "mediumtext" + "name": "mysql_types_c_enum" } }, { - "name": "c_text", - "length": -1, + "name": "c_set", + "length": 15, "table": { "name": "mysql_types" }, "type": { - "name": "text" + "name": "mysql_types_c_set" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_datetime_types" + }, + "columns": [ { - "name": "c_longtext", + "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "longtext" + "name": "year" } }, { - "name": "c_json", + "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" 
+ "name": "date" } }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_time", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "json" + "name": "time" } }, { - "name": "c_enum", - "length": 6, + "name": "c_datetime", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "datetime" } }, { - "name": "c_set", - "length": 15, + "name": "c_timestamp", + "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { - "name": "mysql_types_c_set" + "name": "timestamp" } } ] @@ -615,7 +622,7 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypes", "cmd": ":exec", "parameters": [ @@ -1023,66 +1030,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": 
"year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "comments": [ @@ -1094,7 +1041,7 @@ } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", "name": "InsertMysqlTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -1502,66 +1449,6 @@ }, "originalName": "c_set" } - }, - { - "number": 28, - "column": { - "name": "c_year", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" 
- }, - "type": { - "name": "year" - }, - "originalName": "c_year" - } - }, - { - "number": 29, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 30, - "column": { - "name": "c_datetime", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - } - }, - { - "number": 31, - "column": { - "name": "c_timestamp", - "length": 19, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - } } ], "filename": "query.sql", @@ -1570,7 +1457,7 @@ } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", "name": "GetMysqlTypes", "cmd": ":one", "columns": [ @@ -1739,61 +1626,6 @@ }, "originalName": "c_double_precision" }, - { - "name": "c_year", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "year" - }, - "originalName": "c_year" - }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_types" - }, - 
"type": { - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_datetime", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "datetime" - }, - "originalName": "c_datetime" - }, - { - "name": "c_timestamp", - "length": 19, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, { "name": "c_char", "length": -1, @@ -1930,7 +1762,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n 
c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", "name": "GetMysqlTypesCnt", "cmd": ":one", "columns": [ @@ -2239,12 +2071,171 @@ "name": "mysql_types_c_set" }, "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_types", + "name": "TruncateMysqlTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "comments": [ + " Datetime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_year", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": 
"year" + }, + "originalName": "c_year" + } + }, + { + "number": 2, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ { "name": "c_year", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "year" @@ -2255,18 +2246,29 @@ "name": "c_date", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "date" }, "originalName": "c_date" }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + }, { "name": "c_datetime", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "datetime" @@ -2277,7 +2279,7 @@ "name": "c_timestamp", "length": 19, "table": { - "name": "mysql_types" + "name": "mysql_datetime_types" }, "type": { "name": "timestamp" @@ -2288,43 +2290,69 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS 
cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_year", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "year" + }, + "originalName": "c_year" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_date", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_datetime_types" + }, "type": { - "name": "any" - } + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -2748,6 +2776,44 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + 
"length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" + }, { "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", "name": "GetAuthor", diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index d7d2f630..d4107f93 100644 --- a/examples/MySqlConnectorLegacyExample/request.message +++ b/examples/MySqlConnectorLegacyExample/request.message @@ -2,7 +2,7 @@ ° 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╔ $examples/MySqlConnectorLegacyExamplecsharpВ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner╡public"╦public═ +./dist/LocalRunner¤public"Уpublic·  mysql_types$ c_bool0R  mysql_typesb tinyint' c_boolean0R  mysql_typesb tinyint' @@ -25,14 +25,6 @@ R  mysql_typesb decimal% R  mysql_typesb decimal. c_double0         R  mysql_typesbdouble8 c_double_precision0         R  mysql_typesbdouble* -c_year0         R  mysql_typesbyear* -c_date0         R  mysql_typesbdate! 
-c_time0 -R  mysql_typesbtime) - -c_datetime0R  mysql_typesb -datetime+ - c_timestamp0R  mysql_typesb  timestamp* c_char0         R  mysql_typesbchar+ c_nchar0         R  mysql_typesbchar3 c_national_char0         R  mysql_typesbchar' @@ -49,7 +41,16 @@ c_longtext0 c_json0         R  mysql_typesbjson: c_json_string_override0         R  mysql_typesbjson/ c_enum0R  mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_setИ +c_set0R  mysql_typesbmysql_types_c_setШ +mysql_datetime_types3 +c_year0         Rmysql_datetime_typesbyear3 +c_date0         Rmysql_datetime_typesbdate* +c_time0 +Rmysql_datetime_typesbtime2 + +c_datetime0Rmysql_datetime_typesb +datetime4 + c_timestamp0Rmysql_datetime_typesb  timestampИ mysql_binary_types& c_bit0Rmysql_binary_typesbbit, c_binary0Rmysql_binary_typesbbinary2 @@ -82,8 +83,8 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_typeу -╝ + author_type0Rextendedbiosbbios_author_typeХ +ў INSERT INTO mysql_types ( c_bool, @@ -106,13 +107,9 @@ INSERT INTO mysql_types c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 c_bool0Rpublic mysql_typesb tinyintzc_bool*>: c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF @@ -154,15 +151,8 @@ c_longtext*>: c_json0         Rpublic mysql_typesbjsonzc_json*^Z c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= - -c_datetime0Rpublic mysql_typesb -datetimez -c_datetime*D@ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp2 Basic types : query.sqlB  mysql_types▄ -╗INSERT INTO mysql_types +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set2 Basic types : query.sqlB  mysql_typesО +ЎINSERT INTO mysql_types ( c_bool, c_boolean, @@ -184,13 +174,9 @@ c_datetime*D@ c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 c_bool0Rpublic mysql_typesb tinyintzc_bool*>: c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF @@ -232,15 +218,8 @@ c_longtext*>: c_json0         Rpublic mysql_typesbjsonzc_json*^Z c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? 
c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set*>: -c_year0         Rpublic mysql_typesbyearzc_year*>: -c_date0         Rpublic mysql_typesbdatezc_date*A= - -c_datetime0Rpublic mysql_typesb -datetimez -c_datetime*D@ - c_timestamp0Rpublic mysql_typesb  timestampz c_timestamp: query.sqlB  mysql_types╡ -∙SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_year, c_date, c_time, c_datetime, c_timestamp, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", +c_set0Rpublic mysql_typesbmysql_types_c_setzc_set: query.sqlB  mysql_typesА +╚SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", c_bool0R  mysql_typesb tinyintzc_bool"2 c_boolean0R  mysql_typesb tinyintz c_boolean"2 c_tinyint0R  mysql_typesb tinyintz c_tinyint"> @@ -263,15 +242,6 @@ R  mysql_typesb decimalz c_numeric". 
R  mysql_typesb decimalzc_fixed"8 c_double0         R  mysql_typesbdoublezc_double"L c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_year0         R  mysql_typesbyearzc_year"2 -c_date0         R  mysql_typesbdatezc_date") -c_time0 -R  mysql_typesbtimezc_time"5 - -c_datetime0R  mysql_typesb -datetimez -c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp"2 c_char0         R  mysql_typesbcharzc_char"4 c_nchar0         R  mysql_typesbcharzc_nchar"D c_national_char0         R  mysql_typesbcharzc_national_char"2 @@ -290,8 +260,8 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlї -┴SELECT +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlк +╧SELECT COUNT(*) AS cnt, c_bool, c_boolean, @@ -319,11 +289,7 @@ c_longtext"2 c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set FROM mysql_types GROUP BY c_bool, @@ -346,11 +312,7 @@ GROUP BY c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set LIMIT 1GetMysqlTypesCnt:one" cnt0         @bbigint", c_bool0R  mysql_typesb tinyintzc_bool"2 @@ -393,23 +355,71 @@ c_longtext"2 c_json0         R  mysql_typesbjsonzc_json"R c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set"2 -c_year0         R  mysql_typesbyearzc_year"2 -c_date0         R  mysql_typesbdatezc_date"5 +c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql° +s +INSERT INTO mysql_datetime_types +( + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC +c_year0         
Rpublicmysql_datetime_typesbyearzc_year*GC +c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF -c_datetime0R  mysql_typesb +c_datetime0Rpublicmysql_datetime_typesb datetimez -c_datetime"8 - c_timestamp0R  mysql_typesb  timestampz c_timestamp: query.sqlО -{SELECT - MAX(c_int) AS max_int, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM mysql_typesGetMysqlFunctions:one" -max_int0         @bany"# - max_varchar0         @bany"% - max_timestamp0         @bany: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sqlУ +c_datetime*MI + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp2 Datetime types : query.sqlBmysql_datetime_typesю +rINSERT INTO mysql_datetime_types +( + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC +c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC +c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF + +c_datetime0Rpublicmysql_datetime_typesb +datetimez +c_datetime*MI + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp: query.sqlBmysql_datetime_types│ +XSELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"2 +c_time0 +Rmysql_datetime_typesbtimezc_time"> + +c_datetime0Rmysql_datetime_typesb +datetimez +c_datetime"A + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: query.sqlБ +╢SELECT + COUNT(*) AS cnt, + c_year, + c_date, + c_datetime, + c_timestamp +FROM mysql_datetime_types +GROUP BY + c_year, + c_date, + c_datetime, + c_timestamp +LIMIT 1GetMysqlDatetimeTypesCnt:one" +cnt0         @bbigint"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"> + +c_datetime0Rmysql_datetime_typesb +datetimez +c_datetime"A + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: 
query.sqlS +#TRUNCATE TABLE mysql_datetime_typesTruncateMysqlDatetimeTypes:exec: query.sqlУ о INSERT INTO mysql_binary_types ( @@ -514,7 +524,17 @@ mediumblobz c_mediumblob"E c_longblob0         Rmysql_binary_typesb longblobz c_longblob: query.sqlO -!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlМ +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sql╜ +Ь +SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_types +CROSS JOIN mysql_datetime_typesGetMysqlFunctions:one" +max_int0         @bany"# + max_varchar0         @bany"% + max_timestamp0         @bany2 Functions : query.sqlМ 8SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1 GetAuthor:one"* id0         R authorsbbigintzid", name0         R authorsbtextzname"( diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 4b2aca39..cfd18484 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -43,7 +43,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? 
ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? 
CBoolean { get; init; } @@ -103,18 +103,12 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } @@ -187,7 +181,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? 
CBoolean { get; init; } @@ -235,7 +229,7 @@ public class GetPostgresTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; init; } @@ -282,7 +276,7 @@ public class GetPostgresTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; init; } @@ -314,22 +308,16 @@ public async Task TruncatePostgresTypes() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncatePostgresTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { public JsonElement? 
CJson { get; init; } @@ -351,22 +339,16 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; public class GetPostgresUnstructuredTypesRow { public JsonElement? 
CJson { get; init; } @@ -401,22 +383,16 @@ public async Task TruncatePostgresUnstructuredTypes() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public class InsertPostgresArrayTypesArgs { public byte[]? 
CBytea { get; init; } @@ -440,18 +416,12 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } @@ -510,7 +480,7 @@ public async Task InsertPostgresArrayTypesBatch(List(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public required long Id { get; init; } @@ -800,20 +752,15 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio 
FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public required long Id { get; init; } @@ -845,7 +792,7 @@ public class GetAuthorByIdArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public required long Id { get; init; } @@ -874,7 +821,7 @@ public async Task> GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public required string Name { get; init; } @@ -886,18 +833,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -907,22 +848,16 @@ public async Task TruncateAuthors() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncateAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string? Bio { get; init; } @@ -934,20 +869,15 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; public class GetAuthorsByIdsRow { public required long Id { get; init; } @@ -976,7 +906,7 @@ public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id 
= ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; public class GetAuthorsByIdsAndNamesRow { public required long Id { get; init; } @@ -1025,20 +955,15 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public required Author? Author { get; init; } @@ -1079,7 +1004,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public required Author? Author { get; init; } @@ -1120,7 +1045,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public required long Id { get; init; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index ffcec1d6..0b315512 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -44,7 +44,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, 
@c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -104,18 +104,12 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } @@ -188,7 +182,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string 
GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -236,7 +230,7 @@ public async Task GetPostgresTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; set; } @@ -283,7 +277,7 @@ public async Task GetPostgresTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -315,22 +309,16 @@ public async Task TruncatePostgresTypes() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncatePostgresTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { public JsonElement? 
CJson { get; set; } @@ -352,22 +340,16 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; public class GetPostgresUnstructuredTypesRow { public JsonElement? 
CJson { get; set; } @@ -402,22 +384,16 @@ public async Task TruncatePostgresUnstructuredTypes() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public class InsertPostgresArrayTypesArgs { public byte[] CBytea { get; set; } @@ -441,18 +417,12 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } @@ 
-511,7 +481,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetAuthor(GetAuthorArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public long Id { get; set; } @@ -801,20 +753,15 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public long Id { get; set; } @@ -846,7 +793,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; 
public class GetAuthorByNamePatternRow { public long Id { get; set; } @@ -875,7 +822,7 @@ public async Task> GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -887,18 +834,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -908,22 +849,16 @@ public async Task TruncateAuthors() if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { await connection.ExecuteAsync(TruncateAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -935,20 +870,15 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if 
(this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; public class GetAuthorsByIdsRow { public long Id { get; set; } @@ -977,7 +907,7 @@ public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; public class GetAuthorsByIdsAndNamesRow { public long Id { get; set; } @@ -1026,20 +956,15 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection 
is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -1080,7 +1005,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -1121,7 +1046,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public long Id { get; set; } diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index fc019632..c92fb856 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -40,7 +40,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, 
@c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { @@ -83,10 +83,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresTypesSql; @@ -164,7 +161,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, 
c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { @@ -262,7 +259,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public readonly record struct 
GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); public async Task GetPostgresTypesCnt() { @@ -358,7 +355,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() { @@ -429,10 +426,7 @@ public async Task TruncatePostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresTypesSql; @@ -441,7 +435,7 @@ public async Task TruncatePostgresTypes() } } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { @@ -465,10 +459,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresUnstructuredTypesSql; @@ -483,7 +474,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task GetPostgresUnstructuredTypes() { @@ -570,10 +561,7 @@ public async Task TruncatePostgresUnstructuredTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresUnstructuredTypesSql; @@ -582,7 +570,7 @@ public async Task TruncatePostgresUnstructuredTypes() } } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? 
CTimestampArray); public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { @@ -607,10 +595,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresArrayTypesSql; @@ -710,7 +695,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() { @@ -779,10 +764,7 @@ public async Task TruncatePostgresArrayTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresArrayTypesSql; @@ -791,7 +773,7 @@ public async Task TruncatePostgresArrayTypes() } } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point , c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES ( @c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle ) "; + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; public readonly record struct InsertPostgresGeoTypesArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { @@ -816,10 +798,7 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresGeoTypesSql; @@ -942,10 +921,7 @@ public async Task TruncatePostgresGeoTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresGeoTypesSql; @@ -954,7 +930,7 @@ public async Task TruncatePostgresGeoTypes() } } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public readonly record struct GetAuthorRow(long Id, string Name, string? Bio); public readonly record struct GetAuthorArgs(string Name); public async Task GetAuthor(GetAuthorArgs args) @@ -1011,7 +987,7 @@ public async Task TruncatePostgresGeoTypes() return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public readonly record struct ListAuthorsRow(long Id, string Name, string? 
Bio); public readonly record struct ListAuthorsArgs(int Offset, int Limit); public async Task> ListAuthors(ListAuthorsArgs args) @@ -1134,10 +1110,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1149,7 +1122,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public readonly record struct GetAuthorByIdRow(long Id, string Name, string? Bio); public readonly record struct GetAuthorByIdArgs(long Id); public async Task GetAuthorById(GetAuthorByIdArgs args) @@ -1206,7 +1179,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); public readonly record struct GetAuthorByNamePatternArgs(string? 
NamePattern); public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) @@ -1246,7 +1219,7 @@ public async Task> GetAuthorByNamePattern(GetAut } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public readonly record struct DeleteAuthorArgs(string Name); public async Task DeleteAuthor(DeleteAuthorArgs args) { @@ -1265,10 +1238,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -1295,10 +1265,7 @@ public async Task TruncateAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncateAuthorsSql; @@ -1307,7 +1274,7 @@ public async Task TruncateAuthors() } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public readonly record struct UpdateAuthorsArgs(string? 
Bio); public async Task UpdateAuthors(UpdateAuthorsArgs args) { @@ -1324,10 +1291,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; @@ -1337,7 +1301,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; public readonly record struct GetAuthorsByIdsRow(long Id, string Name, string? Bio); public readonly record struct GetAuthorsByIdsArgs(long[] LongArr1); public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) @@ -1377,7 +1341,7 @@ public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs } } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; public readonly record struct GetAuthorsByIdsAndNamesRow(long Id, string Name, string? 
Bio); public readonly record struct GetAuthorsByIdsAndNamesArgs(long[] LongArr1, string[] StringArr2); public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) @@ -1439,10 +1403,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -1454,7 +1415,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); public async Task> ListAllAuthorsBooks() { @@ -1491,7 +1452,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? 
Author2); public async Task> GetDuplicateAuthors() { @@ -1528,7 +1489,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? Book); public readonly record struct GetAuthorsByBookNameArgs(string Name); public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index fcb9e18b..39f25c86 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -41,7 +41,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean , c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr :: macaddr, @c_macaddr8 :: macaddr8 ) "; + private const string InsertPostgresTypesSql = " 
INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -111,10 +111,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresTypesSql; @@ -217,7 +214,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1 "; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, 
c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -342,7 +339,7 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint , c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1 "; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? 
CSmallint { get; set; } @@ -464,7 +461,7 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types "; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -540,10 +537,7 @@ public async Task TruncatePostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresTypesSql; @@ -552,7 +546,7 @@ public async Task TruncatePostgresTypes() } } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json , c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json :: json, @c_json_string_override :: json, @c_jsonb :: jsonb, @c_jsonpath :: jsonpath, @c_xml :: xml, @c_xml_string_override :: xml ) "; + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { public JsonElement? 
CJson { get; set; } @@ -584,10 +578,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresUnstructuredTypesSql; @@ -602,7 +593,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1 "; + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; public class GetPostgresUnstructuredTypesRow { public JsonElement? 
CJson { get; set; } @@ -697,10 +688,7 @@ public async Task TruncatePostgresUnstructuredTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncatePostgresUnstructuredTypesSql; @@ -709,7 +697,7 @@ public async Task TruncatePostgresUnstructuredTypes() } } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea , c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES ( @c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array ) "; + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public class InsertPostgresArrayTypesArgs { public byte[] CBytea { get; set; } @@ -743,10 +731,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertPostgresArrayTypesSql; @@ -858,7 +843,7 @@ public async Task InsertPostgresArrayTypesBatch(List GetAuthor(GetAuthorArgs args) 
return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public long Id { get; set; } @@ -1347,10 +1323,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1362,7 +1335,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public long Id { get; set; } @@ -1427,7 +1400,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public long Id { get; set; } @@ -1475,7 +1448,7 @@ public async Task> GetAuthorByNamePattern(GetAut } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -1497,10 +1470,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -1527,10 +1497,7 @@ public async Task TruncateAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = TruncateAuthorsSql; @@ -1539,7 +1506,7 @@ public async Task TruncateAuthors() } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -1559,10 +1526,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; @@ -1572,7 +1536,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) "; + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; public class GetAuthorsByIdsRow { public long Id { get; 
set; } @@ -1620,7 +1584,7 @@ public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs } } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY ( @longArr_1 :: BIGINT [ ] ) AND name = ANY ( @stringArr_2 :: TEXT [ ] ) "; + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; public class GetAuthorsByIdsAndNamesRow { public long Id { get; set; } @@ -1698,10 +1662,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -1713,7 +1674,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -1754,7 +1715,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -1795,7 +1756,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public long Id { get; set; } diff --git a/examples/SqliteDapperExample/QuerySql.cs b/examples/SqliteDapperExample/QuerySql.cs index 0ccac6e2..675d0bd1 100644 --- a/examples/SqliteDapperExample/QuerySql.cs +++ b/examples/SqliteDapperExample/QuerySql.cs @@ -37,7 +37,7 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction? Transaction { get; } private string? 
ConnectionString { get; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { public required int Id { get; init; } @@ -69,7 +69,7 @@ public class GetAuthorArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public required int Id { get; init; } @@ -116,18 +116,12 @@ public async Task CreateAuthor(CreateAuthorArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(CreateAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -149,20 +143,15 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await 
this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public required int Id { get; init; } @@ -228,7 +217,7 @@ public class GetAuthorByIdWithMultipleNamedParamArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdWithMultipleNamedParamSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public required int Id { get; init; } @@ -257,7 +246,7 @@ public async Task> GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string? 
Bio { get; init; } @@ -269,16 +258,11 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } @@ -350,7 +334,7 @@ public async Task> GetAuthorsByIdsAndNames(GetA return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public required string Name { get; init; } @@ -362,18 +346,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -395,20 +373,15 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public required Author? Author { get; init; } @@ -450,7 +423,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public required Author? Author { get; init; } @@ -492,7 +465,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public required int Id { get; init; } @@ -548,22 +521,16 @@ public async Task DeleteAllAuthors() if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; public class InsertSqliteTypesArgs { public int? 
CInteger { get; init; } @@ -581,18 +548,12 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); } @@ -650,7 +611,7 @@ public class GetSqliteTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; public class GetSqliteTypesCntRow { public int? CInteger { get; init; } @@ -678,7 +639,7 @@ public class GetSqliteTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; public class GetSqliteFunctionsRow { public int? 
MaxInteger { get; init; } @@ -710,18 +671,12 @@ public async Task DeleteAllSqliteTypes() if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/SqliteDapperLegacyExample/QuerySql.cs b/examples/SqliteDapperLegacyExample/QuerySql.cs index 0ac7dcc2..3f03c335 100644 --- a/examples/SqliteDapperLegacyExample/QuerySql.cs +++ b/examples/SqliteDapperLegacyExample/QuerySql.cs @@ -38,7 +38,7 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction Transaction { get; } private string ConnectionString { get; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { public int Id { get; set; } @@ -70,7 +70,7 @@ public async Task GetAuthor(GetAuthorArgs args) return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public int Id { get; set; } @@ -117,18 +117,12 @@ public async Task CreateAuthor(CreateAuthorArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await 
connection.ExecuteAsync(CreateAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -150,20 +144,15 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public int Id { get; set; } @@ -229,7 +218,7 @@ public async Task GetAuthorByIdWithMulti return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdWithMultipleNamedParamSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public int Id { get; set; } @@ -258,7 +247,7 @@ public async Task> 
GetAuthorByNamePattern(GetAut return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -270,16 +259,11 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } @@ -351,7 +335,7 @@ public async Task> GetAuthorsByIdsAndNames(GetA return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -363,18 +347,12 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its 
connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -396,20 +374,15 @@ public async Task CreateBook(CreateBookArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { return await connection.QuerySingleAsync(CreateBookSql, queryParams); - } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -451,7 +424,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . 
id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -493,7 +466,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public int Id { get; set; } @@ -549,22 +522,16 @@ public async Task DeleteAllAuthors() if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllAuthorsSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; public class InsertSqliteTypesArgs { public int? 
CInteger { get; set; } @@ -582,18 +549,12 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); } @@ -651,7 +612,7 @@ public async Task GetSqliteTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; public class GetSqliteTypesCntRow { public int? CInteger { get; set; } @@ -679,7 +640,7 @@ public async Task GetSqliteTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; public class GetSqliteFunctionsRow { public int? 
MaxInteger { get; set; } @@ -711,18 +672,12 @@ public async Task DeleteAllSqliteTypes() if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) - { await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - } - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); } } diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index ec59608d..6a8f9b22 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -34,7 +34,7 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public readonly record struct GetAuthorRow(int Id, string Name, string? Bio); public readonly record struct GetAuthorArgs(string Name); public async Task GetAuthor(GetAuthorArgs args) @@ -92,7 +92,7 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public readonly record struct ListAuthorsRow(int Id, string Name, string? 
Bio); public readonly record struct ListAuthorsArgs(int Offset, int Limit); public async Task> ListAuthors(ListAuthorsArgs args) @@ -157,10 +157,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -193,10 +190,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -208,7 +202,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public readonly record struct GetAuthorByIdRow(int Id, string Name, string? 
Bio); public readonly record struct GetAuthorByIdArgs(int Id); public async Task GetAuthorById(GetAuthorByIdArgs args) @@ -326,7 +320,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public readonly record struct GetAuthorByNamePatternRow(int Id, string Name, string? Bio); public readonly record struct GetAuthorByNamePatternArgs(string? NamePattern); public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) @@ -367,7 +361,7 @@ public async Task> GetAuthorByNamePattern(GetAut } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public readonly record struct UpdateAuthorsArgs(string? 
Bio); public async Task UpdateAuthors(UpdateAuthorsArgs args) { @@ -385,10 +379,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; @@ -493,7 +484,7 @@ public async Task> GetAuthorsByIdsAndNames(GetA } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public readonly record struct DeleteAuthorArgs(string Name); public async Task DeleteAuthor(DeleteAuthorArgs args) { @@ -513,10 +504,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -547,10 +535,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -562,7 +547,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books 
ON authors . id = books . author_id ORDER BY authors . name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); public async Task> ListAllAuthorsBooks() { @@ -600,7 +585,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? Author2); public async Task> GetDuplicateAuthors() { @@ -638,7 +623,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public readonly record struct GetAuthorsByBookNameRow(int Id, string Name, string? Bio, Book? 
Book); public readonly record struct GetAuthorsByBookNameArgs(string Name); public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) @@ -697,10 +682,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllAuthorsSql; @@ -709,7 +691,7 @@ public async Task DeleteAllAuthors() } } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? CText, byte[]? 
CBlob); public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) { @@ -732,10 +714,7 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertSqliteTypesSql; @@ -827,7 +806,7 @@ public async Task InsertSqliteTypesBatch(List args) return null; } - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; public readonly record struct GetSqliteTypesCntRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob, int Cnt); public async Task GetSqliteTypesCnt() { @@ -886,7 +865,7 @@ public async Task InsertSqliteTypesBatch(List args) return null; } - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? 
MaxText); public async Task GetSqliteFunctions() { @@ -959,10 +938,7 @@ public async Task DeleteAllSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllSqliteTypesSql; diff --git a/examples/SqliteLegacyExample/QuerySql.cs b/examples/SqliteLegacyExample/QuerySql.cs index 688f29d7..aefe8bb2 100644 --- a/examples/SqliteLegacyExample/QuerySql.cs +++ b/examples/SqliteLegacyExample/QuerySql.cs @@ -35,7 +35,7 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction Transaction { get; } private string ConnectionString { get; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1 "; + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { public int Id { get; set; } @@ -101,7 +101,7 @@ public async Task GetAuthor(GetAuthorArgs args) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset "; + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; public class ListAuthorsRow { public int Id { get; set; } @@ -180,10 +180,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using 
(var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -223,10 +220,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -238,7 +232,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1 "; + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; public class GetAuthorByIdRow { public int Id { get; set; } @@ -373,7 +367,7 @@ public async Task GetAuthorByIdWithMulti return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE ( @name_pattern , '%' ) "; + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; public class GetAuthorByNamePatternRow { public int Id { get; set; } @@ -422,7 +416,7 @@ public async Task> GetAuthorByNamePattern(GetAut } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL "; + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; public class UpdateAuthorsArgs { public string Bio { get; set; } @@ -443,10 +437,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = 
this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; @@ -568,7 +559,7 @@ public async Task> GetAuthorsByIdsAndNames(GetA } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name "; + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; public class DeleteAuthorArgs { public string Name { get; set; } @@ -591,10 +582,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAuthorSql; @@ -632,10 +620,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -647,7 +632,7 @@ public async Task CreateBook(CreateBookArgs args) } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id ORDER BY authors . 
name "; + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; public class ListAllAuthorsBooksRow { public Author Author { get; set; } @@ -689,7 +674,7 @@ public async Task> ListAllAuthorsBooks() } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1 . name = authors2 . name WHERE authors1 . id < authors2 . id "; + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; public class GetDuplicateAuthorsRow { public Author Author { get; set; } @@ -731,7 +716,7 @@ public async Task> GetDuplicateAuthors() } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors . id = books . author_id WHERE books . 
name = @name "; + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; public class GetAuthorsByBookNameRow { public int Id { get; set; } @@ -799,10 +784,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllAuthorsSql; @@ -811,7 +793,7 @@ public async Task DeleteAllAuthors() } } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob ) "; + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; public class InsertSqliteTypesArgs { public int? 
CInteger { get; set; } @@ -840,10 +822,7 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = InsertSqliteTypesSql; @@ -946,7 +925,7 @@ public async Task GetSqliteTypes() return null; } - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer , c_real, c_text, c_blob LIMIT 1 "; + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; public class GetSqliteTypesCntRow { public int? CInteger { get; set; } @@ -1012,7 +991,7 @@ public async Task GetSqliteTypesCnt() return null; } - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite "; + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; public class GetSqliteFunctionsRow { public int? 
MaxInteger { get; set; } @@ -1090,10 +1069,7 @@ public async Task DeleteAllSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = DeleteAllSqliteTypesSql; diff --git a/examples/config/mysql/types/query.sql b/examples/config/mysql/types/query.sql index 269c60ab..c2ec823e 100644 --- a/examples/config/mysql/types/query.sql +++ b/examples/config/mysql/types/query.sql @@ -23,13 +23,9 @@ INSERT INTO mysql_types c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); -- name: InsertMysqlTypesBatch :copyfrom INSERT INTO mysql_types @@ -54,13 +50,9 @@ INSERT INTO mysql_types c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); -- name: GetMysqlTypes :one SELECT * FROM mysql_types LIMIT 1; @@ -94,11 +86,7 @@ SELECT c_json, c_json_string_override, c_enum, - c_set, - c_year, - c_date, - c_datetime, - c_timestamp + c_set FROM mysql_types GROUP BY c_bool, @@ -121,22 +109,54 @@ GROUP BY c_json, c_json_string_override, c_enum, - c_set, + c_set +LIMIT 1; + +-- name: TruncateMysqlTypes :exec +TRUNCATE TABLE mysql_types; + +/* Datetime types */ + +-- name: InsertMysqlDatetimeTypes :exec +INSERT INTO mysql_datetime_types +( c_year, 
c_date, c_datetime, c_timestamp -LIMIT 1; +) +VALUES (?, ?, ?, ?); --- name: GetMysqlFunctions :one +-- name: InsertMysqlDatetimeTypesBatch :copyfrom +INSERT INTO mysql_datetime_types +( + c_year, + c_date, + c_datetime, + c_timestamp +) +VALUES (?, ?, ?, ?); + +-- name: GetMysqlDatetimeTypes :one +SELECT * FROM mysql_datetime_types LIMIT 1; + +-- name: GetMysqlDatetimeTypesCnt :one SELECT - MAX(c_int) AS max_int, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM mysql_types; + COUNT(*) AS cnt, + c_year, + c_date, + c_datetime, + c_timestamp +FROM mysql_datetime_types +GROUP BY + c_year, + c_date, + c_datetime, + c_timestamp +LIMIT 1; --- name: TruncateMysqlTypes :exec -TRUNCATE TABLE mysql_types; +-- name: TruncateMysqlDatetimeTypes :exec +TRUNCATE TABLE mysql_datetime_types; /* Binary types */ @@ -192,3 +212,13 @@ LIMIT 1; -- name: TruncateMysqlBinaryTypes :exec TRUNCATE TABLE mysql_binary_types; + +/* Functions */ + +-- name: GetMysqlFunctions :one +SELECT + MAX(c_int) AS max_int, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM mysql_types +CROSS JOIN mysql_datetime_types; diff --git a/examples/config/mysql/types/schema.sql b/examples/config/mysql/types/schema.sql index 6d21f4ee..a6130d86 100644 --- a/examples/config/mysql/types/schema.sql +++ b/examples/config/mysql/types/schema.sql @@ -16,13 +16,6 @@ CREATE TABLE mysql_types ( c_double DOUBLE, c_double_precision DOUBLE PRECISION, - /* Datetime data types */ - c_year YEAR, - c_date DATE, - c_time TIME, - c_datetime DATETIME, - c_timestamp TIMESTAMP, - /* String data types */ c_char CHAR, c_nchar NCHAR, @@ -40,6 +33,14 @@ CREATE TABLE mysql_types ( c_set SET ('tea', 'coffee', 'milk') ); +CREATE TABLE mysql_datetime_types ( + c_year YEAR, + c_date DATE, + c_time TIME, + c_datetime DATETIME, + c_timestamp TIMESTAMP +); + CREATE TABLE mysql_binary_types ( c_bit BIT(8), c_binary BINARY(3), From 8a3cb03c00de5a806a15e9609cae9cf802e64e50 Mon Sep 17 00:00:00 2001 From: 
Ilan Uzan Date: Thu, 14 Aug 2025 16:00:36 +0200 Subject: [PATCH 09/33] fix: re-generate code --- .../MySqlConnectorDapperExample/QuerySql.cs | 22 +++++++++---------- .../QuerySql.cs | 22 +++++++++---------- examples/MySqlConnectorExample/QuerySql.cs | 2 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 2 +- examples/NpgsqlDapperExample/QuerySql.cs | 20 ++++++++--------- .../NpgsqlDapperLegacyExample/QuerySql.cs | 20 ++++++++--------- examples/NpgsqlExample/QuerySql.cs | 2 +- examples/NpgsqlLegacyExample/QuerySql.cs | 2 +- examples/SqliteDapperExample/QuerySql.cs | 10 ++++----- .../SqliteDapperLegacyExample/QuerySql.cs | 10 ++++----- examples/SqliteExample/QuerySql.cs | 2 +- examples/SqliteLegacyExample/QuerySql.cs | 2 +- 12 files changed, 58 insertions(+), 58 deletions(-) diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index 64e53aec..cc54cd54 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -128,7 +128,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -232,7 +232,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, 
queryParams, transaction: this.Transaction); } @@ -247,7 +247,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } @@ -518,7 +518,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } @@ -566,7 +566,7 @@ public async Task TruncateExtendedBios() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } @@ -639,7 +639,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, 
transaction: this.Transaction); } @@ -841,7 +841,7 @@ public async Task TruncateMysqlTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } @@ -868,7 +868,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); } @@ -992,7 +992,7 @@ public async Task TruncateMysqlDatetimeTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); } @@ -1025,7 +1025,7 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await 
this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); } @@ -1159,7 +1159,7 @@ public async Task TruncateMysqlBinaryTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); } diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 3058db1a..122d6042 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -114,7 +114,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); } @@ -315,7 +315,7 @@ public async Task TruncateMysqlTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } @@ -342,7 +342,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); } @@ -466,7 +466,7 @@ public async Task TruncateMysqlDatetimeTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); } @@ -499,7 +499,7 @@ public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); } @@ -632,7 +632,7 @@ public async Task TruncateMysqlBinaryTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); } @@ -746,7 +746,7 @@ public async Task 
CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -850,7 +850,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -865,7 +865,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } @@ -1136,7 +1136,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } @@ -1184,7 +1184,7 @@ public async Task 
TruncateExtendedBios() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } } diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index ec6bb716..60bdab37 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -382,7 +382,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 305bc5ab..bfa43b46 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -1517,7 +1517,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; diff --git a/examples/NpgsqlDapperExample/QuerySql.cs 
b/examples/NpgsqlDapperExample/QuerySql.cs index cfd18484..6fd7fe57 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -108,7 +108,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } @@ -313,7 +313,7 @@ public async Task TruncatePostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } @@ -344,7 +344,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } @@ -388,7 +388,7 @@ public async Task TruncatePostgresUnstructuredTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its 
connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } @@ -421,7 +421,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } @@ -516,7 +516,7 @@ public async Task TruncatePostgresArrayTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } @@ -549,7 +549,7 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } @@ -631,7 +631,7 @@ public async Task TruncatePostgresGeoTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new 
System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } @@ -838,7 +838,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -853,7 +853,7 @@ public async Task TruncateAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 0b315512..2913142c 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -109,7 +109,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, 
queryParams, transaction: this.Transaction); } @@ -314,7 +314,7 @@ public async Task TruncatePostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } @@ -345,7 +345,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } @@ -389,7 +389,7 @@ public async Task TruncatePostgresUnstructuredTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } @@ -422,7 +422,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); 
await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } @@ -517,7 +517,7 @@ public async Task TruncatePostgresArrayTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } @@ -550,7 +550,7 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } @@ -632,7 +632,7 @@ public async Task TruncatePostgresGeoTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } @@ -839,7 +839,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but 
its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -854,7 +854,7 @@ public async Task TruncateAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index c92fb856..06d173fb 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -1291,7 +1291,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 39f25c86..4861b486 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -1526,7 +1526,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { 
command.CommandText = UpdateAuthorsSql; diff --git a/examples/SqliteDapperExample/QuerySql.cs b/examples/SqliteDapperExample/QuerySql.cs index 675d0bd1..5ada63bb 100644 --- a/examples/SqliteDapperExample/QuerySql.cs +++ b/examples/SqliteDapperExample/QuerySql.cs @@ -121,7 +121,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -351,7 +351,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } @@ -526,7 +526,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } @@ -553,7 +553,7 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is 
provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); } @@ -676,7 +676,7 @@ public async Task DeleteAllSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/SqliteDapperLegacyExample/QuerySql.cs b/examples/SqliteDapperLegacyExample/QuerySql.cs index 3f03c335..0572509f 100644 --- a/examples/SqliteDapperLegacyExample/QuerySql.cs +++ b/examples/SqliteDapperLegacyExample/QuerySql.cs @@ -122,7 +122,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -352,7 +352,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: 
this.Transaction); } @@ -527,7 +527,7 @@ public async Task DeleteAllAuthors() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } @@ -554,7 +554,7 @@ public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); } @@ -677,7 +677,7 @@ public async Task DeleteAllSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); } } diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index 6a8f9b22..c9d48511 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -379,7 +379,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw 
new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; diff --git a/examples/SqliteLegacyExample/QuerySql.cs b/examples/SqliteLegacyExample/QuerySql.cs index aefe8bb2..ba3995e1 100644 --- a/examples/SqliteLegacyExample/QuerySql.cs +++ b/examples/SqliteLegacyExample/QuerySql.cs @@ -437,7 +437,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = UpdateAuthorsSql; From ce97f5d9c3cc39229874a7028a0c91bbf244e6e7 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 16:12:28 +0200 Subject: [PATCH 10/33] fix: add missing mysql datetime tests --- Drivers/MySqlConnectorDriver.cs | 10 +- .../EndToEndScaffold/Templates/MySqlTests.cs | 32 +++++-- .../MySqlConnectorDapperTester.generated.cs | 26 ++++-- .../MySqlConnectorTester.generated.cs | 26 ++++-- .../MySqlConnectorDapperTester.generated.cs | 26 ++++-- .../MySqlConnectorTester.generated.cs | 26 ++++-- .../MySqlConnectorDapperExample/Models.cs | 2 +- .../MySqlConnectorDapperExample/QuerySql.cs | 15 ++- .../MySqlConnectorDapperExample/request.json | 87 +++++++++++++----- .../request.message | Bin 23605 -> 23841 bytes .../Models.cs | 2 +- .../QuerySql.cs | 15 ++- .../request.json | 87 +++++++++++++----- .../request.message | 54 ++++++----- examples/MySqlConnectorExample/Models.cs | 2 +- examples/MySqlConnectorExample/QuerySql.cs | 37 ++++---- examples/MySqlConnectorExample/request.json | 87 +++++++++++++----- .../MySqlConnectorExample/request.message | Bin 23589 -> 23825 
bytes .../MySqlConnectorLegacyExample/Models.cs | 2 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 34 ++++--- .../MySqlConnectorLegacyExample/request.json | 87 +++++++++++++----- .../request.message | 54 ++++++----- examples/config/mysql/types/query.sql | 16 ++-- examples/config/mysql/types/schema.sql | 4 +- 24 files changed, 492 insertions(+), 239 deletions(-) diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 14ca92cc..42d9725d 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -101,7 +101,6 @@ public partial class MySqlConnectorDriver( { "longtext", new() }, { "mediumtext", new() }, { "text", new() }, - { "time", new() }, { "tinytext", new() }, { "varchar", new() }, { "var_string", new() }, @@ -117,7 +116,14 @@ public partial class MySqlConnectorDriver( { "datetime", new() }, { "timestamp", new() } }, - ordinal => $"reader.GetDateTime({ordinal})" + readerFn: ordinal => $"reader.GetDateTime({ordinal})" + ), + ["TimeSpan"] = new( + new() + { + { "time", new() } + }, + readerFn: ordinal => $"reader.GetFieldValue({ordinal})" ), /* Unstructured data types */ diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index fe1dbce0..92faf85c 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -176,25 +176,31 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow { Impl = $$""" [Test] - [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22")] - [TestCase(null, null, "1970-1-1 00:00:01")] + [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(null, null, null, null, null)] public async Task TestMySqlDateTimeTypes( short? cYear, DateTime? cDate, - DateTime? cTimestamp) + DateTime? cDatetime, + DateTime? cTimestamp, + TimeSpan? 
cTime) { await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }); var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); @@ -203,7 +209,9 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysql { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } """ @@ -494,14 +502,15 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes { Impl = $$""" [Test] - [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00")] - [TestCase(10, null, null, null, null)] + [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(10, null, null, null, null, null)] public async Task TestDateTimeCopyFrom( int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, - DateTime? cTimestamp) + DateTime? cTimestamp, + TimeSpan? 
cTime) { var batchArgs = Enumerable.Range(0, batchSize) .Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs @@ -509,7 +518,8 @@ public async Task TestDateTimeCopyFrom( CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }) .ToList(); await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); @@ -519,7 +529,8 @@ public async Task TestDateTimeCopyFrom( CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); @@ -531,6 +542,7 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMy Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } """ diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index c384a500..6d00c526 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -463,16 +463,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } [Test] - [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22")] - [TestCase(null, null, "1970-1-1 00:00:01")] - public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) + [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(null, null, null, null, null)] + public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? 
cTime) { - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }); var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); @@ -480,7 +482,9 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysql { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } @@ -753,11 +757,11 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } [Test] - [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00")] - [TestCase(10, null, null, null, null)] - public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) + [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(10, null, null, null, null, null)] + public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? 
cTime) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }).ToList(); await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { @@ -765,7 +769,8 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); @@ -776,6 +781,7 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMy Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 1d5f4e37..6f516cce 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -463,16 +463,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } [Test] - [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22")] - [TestCase(null, null, "1970-1-1 00:00:01")] - public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) + [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(null, null, null, null, null)] + public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? 
cTimestamp, TimeSpan? cTime) { - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }); var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual.Value); @@ -480,7 +482,9 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysql { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } @@ -753,11 +757,11 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } [Test] - [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00")] - [TestCase(10, null, null, null, null)] - public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) + [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(10, null, null, null, null, null)] + public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? 
cTime) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }).ToList(); await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { @@ -765,7 +769,8 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual.Value); @@ -776,6 +781,7 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMy Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 1d015348..22d5bf26 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -463,16 +463,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } [Test] - [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22")] - [TestCase(null, null, "1970-1-1 00:00:01")] - public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) + [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(null, null, null, null, null)] + public async Task TestMySqlDateTimeTypes(short? 
cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? cTime) { - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }); var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); @@ -480,7 +482,9 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysql { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } @@ -753,11 +757,11 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } [Test] - [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00")] - [TestCase(10, null, null, null, null)] - public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) + [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(10, null, null, null, null, null)] + public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? 
cTime) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }).ToList(); await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { @@ -765,7 +769,8 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); @@ -776,6 +781,7 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMy Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index 179fd1c5..ce9f5c14 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -463,16 +463,18 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow } [Test] - [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22")] - [TestCase(null, null, "1970-1-1 00:00:01")] - public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? cTimestamp) + [TestCase(1999, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(null, null, null, null, null)] + public async Task TestMySqlDateTimeTypes(short? cYear, DateTime? cDate, DateTime? 
cDatetime, DateTime? cTimestamp, TimeSpan? cTime) { - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CTimestamp = cTimestamp }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }); var expected = new QuerySql.GetMysqlDatetimeTypesRow { CYear = cYear, CDate = cDate, - CTimestamp = cTimestamp + CDatetime = cDatetime, + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypes(); AssertSingularEquals(expected, actual); @@ -480,7 +482,9 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesRow x, QuerySql.GetMysql { Assert.That(x.CYear, Is.EqualTo(y.CYear)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); + Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } @@ -753,11 +757,11 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes } [Test] - [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00")] - [TestCase(10, null, null, null, null)] - public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp) + [TestCase(100, 1993, "2000-1-30", "1983-11-3 02:01:22", "2010-1-30 08:11:00", "02:01:22")] + [TestCase(10, null, null, null, null, null)] + public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cDate, DateTime? cDatetime, DateTime? cTimestamp, TimeSpan? 
cTime) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlDatetimeTypesBatchArgs { CYear = cYear, CDate = cDate, CDatetime = cDatetime, CTimestamp = cTimestamp, CTime = cTime }).ToList(); await QuerySql.InsertMysqlDatetimeTypesBatch(batchArgs); var expected = new QuerySql.GetMysqlDatetimeTypesCntRow { @@ -765,7 +769,8 @@ public async Task TestDateTimeCopyFrom(int batchSize, short? cYear, DateTime? cD CYear = cYear, CDate = cDate, CDatetime = cDatetime, - CTimestamp = cTimestamp + CTimestamp = cTimestamp, + CTime = cTime }; var actual = await QuerySql.GetMysqlDatetimeTypesCnt(); AssertSingularEquals(expected, actual); @@ -776,6 +781,7 @@ void AssertSingularEquals(QuerySql.GetMysqlDatetimeTypesCntRow x, QuerySql.GetMy Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CDatetime, Is.EqualTo(y.CDatetime)); Assert.That(x.CTimestamp, Is.EqualTo(y.CTimestamp)); + Assert.That(x.CTime, Is.EqualTo(y.CTime)); } } diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index c45114c7..a26a969a 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -39,9 +39,9 @@ public class MysqlDatetimeType { public short? CYear { get; init; } public DateTime? CDate { get; init; } - public string? CTime { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } + public TimeSpan? 
CTime { get; init; } }; public class MysqlBinaryType { diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index cc54cd54..beaf0e50 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -845,13 +845,14 @@ public async Task TruncateMysqlTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; public class InsertMysqlDatetimeTypesArgs { public short? CYear { get; init; } public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } + public TimeSpan? CTime { get; init; } }; public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { @@ -860,6 +861,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) queryParams.Add("c_date", args.CDate); queryParams.Add("c_datetime", args.CDatetime); queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_time", args.CTime); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) @@ -878,6 +880,7 @@ public class InsertMysqlDatetimeTypesBatchArgs public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } + public TimeSpan? 
CTime { get; init; } }; public async Task InsertMysqlDatetimeTypesBatch(List args) { @@ -902,6 +905,7 @@ public async Task InsertMysqlDatetimeTypesBatch(List(options); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -919,20 +923,20 @@ public async Task InsertMysqlDatetimeTypesBatch(List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; public class GetMysqlDatetimeTypesRow { public short? CYear { get; init; } public DateTime? CDate { get; init; } - public string? CTime { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } + public TimeSpan? 
CTime { get; init; } }; public async Task GetMysqlDatetimeTypes() { @@ -953,7 +957,7 @@ public class GetMysqlDatetimeTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; public class GetMysqlDatetimeTypesCntRow { public required long Cnt { get; init; } @@ -961,6 +965,7 @@ public class GetMysqlDatetimeTypesCntRow public DateTime? CDate { get; init; } public DateTime? CDatetime { get; init; } public DateTime? CTimestamp { get; init; } + public TimeSpan? CTime { get; init; } }; public async Task GetMysqlDatetimeTypesCnt() { diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 42c5e321..77a79af4 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -328,33 +328,33 @@ } }, { - "name": "c_time", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "time" + "name": "datetime" } }, { - "name": "c_datetime", + "name": "c_timestamp", "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "datetime" + "name": "timestamp" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_time", + "length": 10, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "timestamp" + "name": "time" } } ] @@ -2933,7 +2933,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) 
\nVALUES (?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypes", "cmd": ":exec", "parameters": [ @@ -2996,6 +2996,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "comments": [ @@ -3007,7 +3022,7 @@ } }, { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -3070,6 +3085,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "filename": "query.sql", @@ -3078,7 +3108,7 @@ } }, { - "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", "name": "GetMysqlDatetimeTypes", "cmd": ":one", "columns": [ @@ -3104,17 +3134,6 @@ }, "originalName": "c_date" }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_datetime_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, { "name": "c_datetime", "length": 19, @@ -3136,12 +3155,23 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" }, { - 
"text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ @@ -3197,6 +3227,17 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 869ab2f07f9ddf131653c452e4cf9cf49121e564..10562c82be10ed0832a7314d5d3ab53f0071f9f9 100644 GIT binary patch delta 278 zcmdnGgK^<5#tq#Zj7F1Rc>^(r+2k$`oy`iI8?9MRuy8d^zThJ=`Gt!oj}Dgt5G2Q! zWag$$Hgpx+?BW{4D4=D>!760M#Rd^K;F`?urZ{;$2m9nZo*a{>xtX!$h6JbKBxEr@q1c^ delta 209 zcmZ3ui*f4?#tq#Z+*(|0$?+wbxv2(RlNWkPvaw0A0C|%;IkGn^aBj3_{lUyt%r#lj zJ$iCNpxEYcSAWLI&)m#d-Z66(!9^FkM=(yd^mJ4PnWLkS9G{X{k_uyk=;D&Z+yVu+ zAb($l$sgTCCU5jyz-@$T@nkJe{>cGe&4N~(g{7%Qm3qa6IZ}-*T-z8YTZbu4w(>S) a<3bp)%tl}`qpRWOd)}^$o5Oui@dE(Xr$&?j diff --git a/examples/MySqlConnectorDapperLegacyExample/Models.cs b/examples/MySqlConnectorDapperLegacyExample/Models.cs index 4f11fdb5..b56ea9dc 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Models.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Models.cs @@ -40,9 +40,9 @@ public class MysqlDatetimeType { public short? CYear { get; set; } public DateTime? CDate { get; set; } - public string CTime { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; public class MysqlBinaryType { diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 122d6042..433326a3 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -319,13 +319,14 @@ public async Task TruncateMysqlTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; public class InsertMysqlDatetimeTypesArgs { public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { @@ -334,6 +335,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) queryParams.Add("c_date", args.CDate); queryParams.Add("c_datetime", args.CDatetime); queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_time", args.CTime); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) @@ -352,6 +354,7 @@ public class InsertMysqlDatetimeTypesBatchArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; public async Task InsertMysqlDatetimeTypesBatch(List args) { @@ -376,6 +379,7 @@ public async Task InsertMysqlDatetimeTypesBatch(List(options); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -393,20 +397,20 @@ public async Task InsertMysqlDatetimeTypesBatch(List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; public class GetMysqlDatetimeTypesRow { public short? CYear { get; set; } public DateTime? CDate { get; set; } - public string CTime { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; public async Task GetMysqlDatetimeTypes() { @@ -427,7 +431,7 @@ public async Task GetMysqlDatetimeTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; public class GetMysqlDatetimeTypesCntRow { public long Cnt { get; set; } @@ -435,6 +439,7 @@ public class GetMysqlDatetimeTypesCntRow public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public async Task GetMysqlDatetimeTypesCnt() { diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index fb2c6f5f..a17db6c2 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -328,33 +328,33 @@ } }, { - "name": "c_time", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "time" + "name": "datetime" } }, { - "name": "c_datetime", + "name": "c_timestamp", "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "datetime" + "name": "timestamp" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_time", + "length": 10, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "timestamp" + "name": "time" } } ] @@ -2082,7 +2082,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n 
c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypes", "cmd": ":exec", "parameters": [ @@ -2145,6 +2145,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "comments": [ @@ -2156,7 +2171,7 @@ } }, { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -2219,6 +2234,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "filename": "query.sql", @@ -2227,7 +2257,7 @@ } }, { - "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", "name": "GetMysqlDatetimeTypes", "cmd": ":one", "columns": [ @@ -2253,17 +2283,6 @@ }, "originalName": "c_date" }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_datetime_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, { "name": "c_datetime", "length": 19, @@ -2285,12 +2304,23 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": 
"query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ @@ -2346,6 +2376,17 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index 54a1a1c2..93acf9db 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.message +++ b/examples/MySqlConnectorDapperLegacyExample/request.message @@ -44,13 +44,13 @@ c_longtext0 c_set0R  mysql_typesbmysql_types_c_setШ mysql_datetime_types3 c_year0         Rmysql_datetime_typesbyear3 -c_date0         Rmysql_datetime_typesbdate* -c_time0 -Rmysql_datetime_typesbtime2 +c_date0         Rmysql_datetime_typesbdate2 c_datetime0Rmysql_datetime_typesb datetime4 - c_timestamp0Rmysql_datetime_typesb  timestampИ + c_timestamp0Rmysql_datetime_typesb  timestamp* +c_time0 +Rmysql_datetime_typesbtimeИ mysql_binary_types& c_bit0Rmysql_binary_typesbbit, c_binary0Rmysql_binary_typesbbinary2 @@ -356,60 +356,68 @@ c_longtext"2 c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql° -s +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql╚ +В INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + 
c_time ) -VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC +VALUES (?, ?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF c_datetime0Rpublicmysql_datetime_typesb datetimez c_datetime*MI - c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp2 Datetime types : query.sqlBmysql_datetime_typesю -rINSERT INTO mysql_datetime_types + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp*>: +c_time0 +Rpublicmysql_datetime_typesbtimezc_time2 Datetime types : query.sqlBmysql_datetime_types╛ +БINSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time ) -VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC +VALUES (?, ?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF c_datetime0Rpublicmysql_datetime_typesb datetimez c_datetime*MI - c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp: query.sqlBmysql_datetime_types│ -XSELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; -c_year0         Rmysql_datetime_typesbyearzc_year"; -c_date0         Rmysql_datetime_typesbdatezc_date"2 + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp*>: c_time0 -Rmysql_datetime_typesbtimezc_time"> +Rpublicmysql_datetime_typesbtimezc_time: query.sqlBmysql_datetime_types│ +XSELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"> c_datetime0Rmysql_datetime_typesb datetimez c_datetime"A - c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: query.sqlБ -╢SELECT + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp"2 +c_time0 
+Rmysql_datetime_typesbtimezc_time: query.sql═ +╬SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time LIMIT 1GetMysqlDatetimeTypesCnt:one" cnt0         @bbigint"; c_year0         Rmysql_datetime_typesbyearzc_year"; @@ -418,7 +426,9 @@ LIMIT 1GetMysqlDatetimeTypesCnt:one" c_datetime0Rmysql_datetime_typesb datetimez c_datetime"A - c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: query.sqlS + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp"2 +c_time0 +Rmysql_datetime_typesbtimezc_time: query.sqlS #TRUNCATE TABLE mysql_datetime_typesTruncateMysqlDatetimeTypes:exec: query.sqlУ о INSERT INTO mysql_binary_types diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index 4a6cea9b..d2ea81a0 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -6,7 +6,7 @@ namespace MySqlConnectorExampleGen; public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); -public readonly record struct MysqlDatetimeType(short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp); +public readonly record struct MysqlDatetimeType(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? CTime); public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? 
CMediumblob, byte[]? CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index 60bdab37..cccb9210 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -1133,8 +1133,8 @@ public async Task TruncateMysqlTypes() } } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; - public readonly record struct InsertMysqlDatetimeTypesArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; + public readonly record struct InsertMysqlDatetimeTypesArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? CTime); public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { if (this.Transaction == null) @@ -1148,6 +1148,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1165,11 +1166,12 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public readonly record struct InsertMysqlDatetimeTypesBatchArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp); + public readonly record struct InsertMysqlDatetimeTypesBatchArgs(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? CTime); public async Task InsertMysqlDatetimeTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; @@ -1193,6 +1195,7 @@ public async Task InsertMysqlDatetimeTypesBatch(List(options); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -1210,14 +1213,14 @@ public async Task InsertMysqlDatetimeTypesBatch(List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; - public readonly record struct GetMysqlDatetimeTypesRow(short? CYear, DateTime? CDate, string? CTime, DateTime? CDatetime, DateTime? CTimestamp); + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; + public readonly record struct GetMysqlDatetimeTypesRow(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? 
CTime); public async Task GetMysqlDatetimeTypes() { if (this.Transaction == null) @@ -1235,9 +1238,9 @@ public async Task InsertMysqlDatetimeTypesBatch(List(4) }; } } @@ -1264,9 +1267,9 @@ public async Task InsertMysqlDatetimeTypesBatch(List(4) }; } } @@ -1275,8 +1278,8 @@ public async Task InsertMysqlDatetimeTypesBatch(List GetMysqlDatetimeTypesCnt() { if (this.Transaction == null) @@ -1296,7 +1299,8 @@ public async Task InsertMysqlDatetimeTypesBatch(List(5) }; } } @@ -1325,7 +1329,8 @@ public async Task InsertMysqlDatetimeTypesBatch(List(5) }; } } diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index b3e5fb1a..18c77bee 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -328,33 +328,33 @@ } }, { - "name": "c_time", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "time" + "name": "datetime" } }, { - "name": "c_datetime", + "name": "c_timestamp", "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "datetime" + "name": "timestamp" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_time", + "length": 10, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "timestamp" + "name": "time" } } ] @@ -2933,7 +2933,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypes", "cmd": ":exec", "parameters": [ @@ -2996,6 +2996,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], 
"comments": [ @@ -3007,7 +3022,7 @@ } }, { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -3070,6 +3085,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "filename": "query.sql", @@ -3078,7 +3108,7 @@ } }, { - "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", "name": "GetMysqlDatetimeTypes", "cmd": ":one", "columns": [ @@ -3104,17 +3134,6 @@ }, "originalName": "c_date" }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_datetime_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, { "name": "c_datetime", "length": 19, @@ -3136,12 +3155,23 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ @@ -3197,6 +3227,17 @@ "name": "timestamp" }, "originalName": 
"c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" diff --git a/examples/MySqlConnectorExample/request.message b/examples/MySqlConnectorExample/request.message index 21a116ef0eeefc2d15e12c26b686e1727b42b52f..7057b8c1c95a11fb164564ad8336661d52d8aa17 100644 GIT binary patch delta 294 zcmZ3wgK^?6#tjV|j7F31djm0t+2nc-9Tuq$X0FKx{Y5wHan7)2J;B1nnsOAL<= zmjVzZ$CqU0rcO@KmfURP>dz>kWyirPWW~h>5jWtP9O$Mv`IIOB}6BZhtXsQH=fC@Ue1D6oQ0*SMU{HRg*j4IX# diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index 43b26c13..fdcce71b 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -40,9 +40,9 @@ public class MysqlDatetimeType { public short? CYear { get; set; } public DateTime? CDate { get; set; } - public string CTime { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public class MysqlBinaryType { diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index bfa43b46..60c40449 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -532,13 +532,14 @@ public async Task TruncateMysqlTypes() } } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp)"; + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; public class InsertMysqlDatetimeTypesArgs { public short? CYear { get; set; } public DateTime? CDate { get; set; } public DateTime? 
CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { @@ -553,6 +554,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -570,6 +572,7 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -580,6 +583,7 @@ public class InsertMysqlDatetimeTypesBatchArgs public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; public async Task InsertMysqlDatetimeTypesBatch(List args) { @@ -604,6 +608,7 @@ public async Task InsertMysqlDatetimeTypesBatch(List(options); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -621,20 +626,20 @@ public async Task InsertMysqlDatetimeTypesBatch(List { "c_year", "c_date", "c_datetime", "c_timestamp" }); + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1"; + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; public class GetMysqlDatetimeTypesRow { public short? CYear { get; set; } public DateTime? CDate { get; set; } - public string CTime { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public async Task GetMysqlDatetimeTypes() { @@ -653,9 +658,9 @@ public async Task GetMysqlDatetimeTypes() { CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), - CTime = reader.IsDBNull(2) ? null : reader.GetString(2), - CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) + CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTime = reader.IsDBNull(4) ? (TimeSpan? 
)null : reader.GetFieldValue(4) }; } } @@ -682,9 +687,9 @@ public async Task GetMysqlDatetimeTypes() { CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), - CTime = reader.IsDBNull(2) ? null : reader.GetString(2), - CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) + CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTime = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) }; } } @@ -693,7 +698,7 @@ public async Task GetMysqlDatetimeTypes() return null; } - private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp LIMIT 1"; + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; public class GetMysqlDatetimeTypesCntRow { public long Cnt { get; set; } @@ -701,6 +706,7 @@ public class GetMysqlDatetimeTypesCntRow public DateTime? CDate { get; set; } public DateTime? CDatetime { get; set; } public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; public async Task GetMysqlDatetimeTypesCnt() { @@ -721,7 +727,8 @@ public async Task GetMysqlDatetimeTypesCnt() CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), + CTime = reader.IsDBNull(5) ? 
(TimeSpan? )null : reader.GetFieldValue(5) }; } } @@ -750,7 +757,8 @@ public async Task GetMysqlDatetimeTypesCnt() CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4) + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), + CTime = reader.IsDBNull(5) ? (TimeSpan? )null : reader.GetFieldValue(5) }; } } diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index 22eb9237..867f5c46 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -328,33 +328,33 @@ } }, { - "name": "c_time", - "length": 10, + "name": "c_datetime", + "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "time" + "name": "datetime" } }, { - "name": "c_datetime", + "name": "c_timestamp", "length": 19, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "datetime" + "name": "timestamp" } }, { - "name": "c_timestamp", - "length": 19, + "name": "c_time", + "length": 10, "table": { "name": "mysql_datetime_types" }, "type": { - "name": "timestamp" + "name": "time" } } ] @@ -2082,7 +2082,7 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypes", "cmd": ":exec", "parameters": [ @@ -2145,6 +2145,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + 
"originalName": "c_time" + } } ], "comments": [ @@ -2156,7 +2171,7 @@ } }, { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp\n) \nVALUES (?, ?, ?, ?)", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", "name": "InsertMysqlDatetimeTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -2219,6 +2234,21 @@ }, "originalName": "c_timestamp" } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } } ], "filename": "query.sql", @@ -2227,7 +2257,7 @@ } }, { - "text": "SELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1", + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", "name": "GetMysqlDatetimeTypes", "cmd": ":one", "columns": [ @@ -2253,17 +2283,6 @@ }, "originalName": "c_date" }, - { - "name": "c_time", - "length": 10, - "table": { - "name": "mysql_datetime_types" - }, - "type": { - "name": "time" - }, - "originalName": "c_time" - }, { "name": "c_datetime", "length": 19, @@ -2285,12 +2304,23 @@ "name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ @@ -2346,6 +2376,17 @@ 
"name": "timestamp" }, "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } ], "filename": "query.sql" diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index d4107f93..70a15d96 100644 --- a/examples/MySqlConnectorLegacyExample/request.message +++ b/examples/MySqlConnectorLegacyExample/request.message @@ -44,13 +44,13 @@ c_longtext0 c_set0R  mysql_typesbmysql_types_c_setШ mysql_datetime_types3 c_year0         Rmysql_datetime_typesbyear3 -c_date0         Rmysql_datetime_typesbdate* -c_time0 -Rmysql_datetime_typesbtime2 +c_date0         Rmysql_datetime_typesbdate2 c_datetime0Rmysql_datetime_typesb datetime4 - c_timestamp0Rmysql_datetime_typesb  timestampИ + c_timestamp0Rmysql_datetime_typesb  timestamp* +c_time0 +Rmysql_datetime_typesbtimeИ mysql_binary_types& c_bit0Rmysql_binary_typesbbit, c_binary0Rmysql_binary_typesbbinary2 @@ -356,60 +356,68 @@ c_longtext"2 c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql° -s +TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql╚ +В INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time ) -VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC +VALUES (?, ?, ?, ?, ?)InsertMysqlDatetimeTypes:exec*GC c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF c_datetime0Rpublicmysql_datetime_typesb datetimez c_datetime*MI - c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp2 Datetime types : query.sqlBmysql_datetime_typesю -rINSERT INTO mysql_datetime_types + c_timestamp0Rpublicmysql_datetime_typesb  timestampz 
c_timestamp*>: +c_time0 +Rpublicmysql_datetime_typesbtimezc_time2 Datetime types : query.sqlBmysql_datetime_types╛ +БINSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time ) -VALUES (?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC +VALUES (?, ?, ?, ?, ?)InsertMysqlDatetimeTypesBatch :copyfrom*GC c_year0         Rpublicmysql_datetime_typesbyearzc_year*GC c_date0         Rpublicmysql_datetime_typesbdatezc_date*JF c_datetime0Rpublicmysql_datetime_typesb datetimez c_datetime*MI - c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp: query.sqlBmysql_datetime_types│ -XSELECT c_year, c_date, c_time, c_datetime, c_timestamp FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; -c_year0         Rmysql_datetime_typesbyearzc_year"; -c_date0         Rmysql_datetime_typesbdatezc_date"2 + c_timestamp0Rpublicmysql_datetime_typesb  timestampz c_timestamp*>: c_time0 -Rmysql_datetime_typesbtimezc_time"> +Rpublicmysql_datetime_typesbtimezc_time: query.sqlBmysql_datetime_types│ +XSELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1GetMysqlDatetimeTypes:one"; +c_year0         Rmysql_datetime_typesbyearzc_year"; +c_date0         Rmysql_datetime_typesbdatezc_date"> c_datetime0Rmysql_datetime_typesb datetimez c_datetime"A - c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp: query.sqlБ -╢SELECT + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp"2 +c_time0 +Rmysql_datetime_typesbtimezc_time: query.sql═ +╬SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time LIMIT 1GetMysqlDatetimeTypesCnt:one" cnt0         @bbigint"; c_year0         Rmysql_datetime_typesbyearzc_year"; @@ -418,7 +426,9 @@ LIMIT 1GetMysqlDatetimeTypesCnt:one" c_datetime0Rmysql_datetime_typesb datetimez c_datetime"A - c_timestamp0Rmysql_datetime_typesb  timestampz 
c_timestamp: query.sqlS + c_timestamp0Rmysql_datetime_typesb  timestampz c_timestamp"2 +c_time0 +Rmysql_datetime_typesbtimezc_time: query.sqlS #TRUNCATE TABLE mysql_datetime_typesTruncateMysqlDatetimeTypes:exec: query.sqlУ о INSERT INTO mysql_binary_types diff --git a/examples/config/mysql/types/query.sql b/examples/config/mysql/types/query.sql index c2ec823e..a06ec5ba 100644 --- a/examples/config/mysql/types/query.sql +++ b/examples/config/mysql/types/query.sql @@ -123,9 +123,10 @@ INSERT INTO mysql_datetime_types c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time ) -VALUES (?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?); -- name: InsertMysqlDatetimeTypesBatch :copyfrom INSERT INTO mysql_datetime_types @@ -133,9 +134,10 @@ INSERT INTO mysql_datetime_types c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time ) -VALUES (?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?); -- name: GetMysqlDatetimeTypes :one SELECT * FROM mysql_datetime_types LIMIT 1; @@ -146,13 +148,15 @@ SELECT c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, - c_timestamp + c_timestamp, + c_time LIMIT 1; -- name: TruncateMysqlDatetimeTypes :exec diff --git a/examples/config/mysql/types/schema.sql b/examples/config/mysql/types/schema.sql index a6130d86..7a70ab09 100644 --- a/examples/config/mysql/types/schema.sql +++ b/examples/config/mysql/types/schema.sql @@ -36,9 +36,9 @@ CREATE TABLE mysql_types ( CREATE TABLE mysql_datetime_types ( c_year YEAR, c_date DATE, - c_time TIME, c_datetime DATETIME, - c_timestamp TIMESTAMP + c_timestamp TIMESTAMP, + c_time TIME ); CREATE TABLE mysql_binary_types ( From b99b66cdc4c474e2f277b4add69dc1dce9f3fc51 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 16:48:39 +0200 Subject: [PATCH 11/33] fix: separate the rest of mysql data types by group --- .../EndToEndScaffold/Templates/MySqlTests.cs | 116 +- .../MySqlConnectorDapperTester.cs | 3 +- 
.../MySqlConnectorDapperTester.generated.cs | 103 +- end2end/EndToEndTests/MySqlConnectorTester.cs | 3 +- .../MySqlConnectorTester.generated.cs | 103 +- .../MySqlConnectorDapperTester.cs | 3 +- .../MySqlConnectorDapperTester.generated.cs | 103 +- .../MySqlConnectorTester.cs | 3 +- .../MySqlConnectorTester.generated.cs | 103 +- .../MySqlConnectorDapperExample/Models.cs | 49 +- .../MySqlConnectorDapperExample/QuerySql.cs | 286 +++- examples/MySqlConnectorDapperExample/Utils.cs | 16 +- .../MySqlConnectorDapperExample/request.json | 1375 +++++++++-------- .../request.message | Bin 23841 -> 25756 bytes .../Models.cs | 49 +- .../QuerySql.cs | 284 +++- .../Utils.cs | 16 +- .../request.json | 1375 +++++++++-------- .../request.message | 510 +++--- examples/MySqlConnectorExample/Models.cs | 43 +- examples/MySqlConnectorExample/QuerySql.cs | 418 +++-- examples/MySqlConnectorExample/Utils.cs | 4 +- examples/MySqlConnectorExample/request.json | 1375 +++++++++-------- .../MySqlConnectorExample/request.message | Bin 23825 -> 25740 bytes .../MySqlConnectorLegacyExample/Models.cs | 49 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 521 +++++-- examples/MySqlConnectorLegacyExample/Utils.cs | 4 +- .../MySqlConnectorLegacyExample/request.json | 1375 +++++++++-------- .../request.message | 510 +++--- examples/config/mysql/types/query.sql | 142 +- examples/config/mysql/types/schema.sql | 62 +- 31 files changed, 5034 insertions(+), 3969 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index 92faf85c..43c07b46 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -1,5 +1,4 @@ using System.Collections.Generic; -using System.Text.Json; namespace EndToEndScaffold.Templates; @@ -23,7 +22,7 @@ public async Task TestMySqlStringTypes( string cText, string cLongtext) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await 
QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CChar = cChar, CNchar = cNchar, @@ -35,7 +34,7 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CLongtext = cLongtext }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlStringTypesRow { CChar = cChar, CNchar = cNchar, @@ -46,10 +45,10 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); @@ -80,7 +79,7 @@ public async Task TestMySqlIntegerTypes( int? cInteger, long? 
cBigint) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CBool = cBool, CBoolean = cBoolean, @@ -92,7 +91,7 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CBigint = cBigint }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlNumericTypesRow { CBool = cBool, CBoolean = cBoolean, @@ -103,10 +102,10 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CBool, Is.EqualTo(y.CBool)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -135,7 +134,7 @@ public async Task TestMySqlFloatingPointTypes( double? cDouble, double? 
cDoublePrecision) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CFloat = cFloat, CNumeric = cNumeric, @@ -146,7 +145,7 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CDoublePrecision = cDoublePrecision }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlNumericTypesRow { CFloat = cFloat, CNumeric = cNumeric, @@ -156,10 +155,10 @@ await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -278,8 +277,8 @@ private static IEnumerable MySqlEnumTypesTestCases get { yield return new TestCaseData( - MysqlTypesCEnum.Medium, - new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee } + MysqlStringTypesCEnum.Medium, + new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee } ).SetName("Valid Enum values"); yield return new TestCaseData( @@ -292,24 +291,24 @@ private static IEnumerable MySqlEnumTypesTestCases [Test] [TestCaseSource(nameof(MySqlEnumTypesTestCases))] public async Task TestMySqlStringTypes( - MysqlTypesCEnum? cEnum, - HashSet cSet) + MysqlStringTypesCEnum? 
cEnum, + HashSet cSet) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CEnum = cEnum, CSet = cSet }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlStringTypesRow { CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); Assert.That(x.CSet, Is.EqualTo(y.CSet)); @@ -335,7 +334,7 @@ public async Task TestStringCopyFrom( string cLongtext) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CChar = cChar, CNchar = cNchar, @@ -347,8 +346,8 @@ public async Task TestStringCopyFrom( CLongtext = cLongtext }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -361,10 +360,10 @@ public async Task TestStringCopyFrom( CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -397,7 +396,7 @@ public async Task 
TestIntegerCopyFrom( long? cBigint) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, @@ -409,8 +408,8 @@ public async Task TestIntegerCopyFrom( CBigint = cBigint }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CBool = cBool, @@ -423,10 +422,10 @@ public async Task TestIntegerCopyFrom( CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBool, Is.EqualTo(y.CBool)); @@ -458,7 +457,7 @@ public async Task TestFloatingPointCopyFrom( double? 
cDoublePrecision) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, @@ -469,8 +468,8 @@ public async Task TestFloatingPointCopyFrom( CDoublePrecision = cDoublePrecision }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CFloat = cFloat, @@ -481,10 +480,10 @@ public async Task TestFloatingPointCopyFrom( CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); @@ -670,8 +669,8 @@ private static IEnumerable MySqlEnumCopyFromTestCases { yield return new TestCaseData( 100, - MysqlTypesCEnum.Big, - new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee } + MysqlStringTypesCEnum.Big, + new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee } ).SetName("Valid Enum values"); yield return new TestCaseData( @@ -686,27 +685,27 @@ private static IEnumerable MySqlEnumCopyFromTestCases [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] public async Task TestCopyFrom( int batchSize, - MysqlTypesCEnum? cEnum, - HashSet cSet) + MysqlStringTypesCEnum? 
cEnum, + HashSet cSet) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CEnum = cEnum, CSet = cSet }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); @@ -765,20 +764,20 @@ public async Task TestMySqlJsonDataType( if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); - var expected = new QuerySql.GetMysqlTypesRow + var expected = new QuerySql.GetMysqlStringTypesRow { CJson = cParsedJson, CJsonStringOverride = cJson }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -795,7 +794,7 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow 
public void TestMySqlInvalidJson() { Assert.ThrowsAsync(async () => await - QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); @@ -817,22 +816,22 @@ public async Task TestJsonCopyFrom( cParsedJson = JsonDocument.Parse(cJson).RootElement; var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertMysqlTypesBatchArgs + .Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }) .ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CJson = cParsedJson }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); @@ -853,9 +852,12 @@ public async Task TestMySqlDataTypesOverride( string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs + { + CInt = cInt + }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { - CInt = cInt, CVarchar = cVarchar }); await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs index 177141dd..6abddaf9 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.cs 
+++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.cs @@ -15,7 +15,8 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateExtendedBios(); - await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlNumericTypes(); + await QuerySql.TruncateMysqlStringTypes(); await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); } diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index 6d00c526..a52670d2 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -333,8 +333,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null, null, null, null)] public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); + var expected = new QuerySql.GetMysqlStringTypesRow { CChar = cChar, CNchar = cNchar, @@ -345,9 +345,9 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void 
AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); @@ -365,8 +365,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null, null, null)] public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTinyint, short? cYear, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? cBigint) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CBool = cBool, CBoolean = cBoolean, @@ -377,9 +377,9 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CBool, Is.EqualTo(y.CBool)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -437,8 +437,8 @@ public async Task TestMySqlTransactionRollback() [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? 
cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? cDoublePrecision) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CFloat = cFloat, CNumeric = cNumeric, @@ -448,9 +448,9 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -523,24 +523,24 @@ private static IEnumerable MySqlEnumTypesTestCases { get { - yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(MysqlStringTypesCEnum.Medium, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumTypesTestCases))] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, HashSet cSet) + public async Task TestMySqlStringTypes(MysqlStringTypesCEnum? 
cEnum, HashSet cSet) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CEnum = cEnum, CSet = cSet }); + var expected = new QuerySql.GetMysqlStringTypesRow { CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); Assert.That(x.CSet, Is.EqualTo(y.CSet)); @@ -552,7 +552,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { @@ -604,15 +605,15 @@ public async Task TestMySqlJsonDataType(string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); + var expected = new QuerySql.GetMysqlStringTypesRow { CJson = cParsedJson, CJsonStringOverride = cJson }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -629,16 +630,16 @@ public async Task TestJsonCopyFrom(int batchSize, string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CJson = cParsedJson }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); @@ -650,7 +651,7 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [Test] public void TestMySqlInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); } [Test] @@ -658,9 +659,9 @@ public void TestMySqlInvalidJson() [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = 
cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -672,9 +673,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -693,9 +694,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean, short? cTinyint, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CBool = cBool, @@ -707,9 +708,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBool, Is.EqualTo(y.CBool)); @@ -728,9 +729,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CFloat = cFloat, @@ -741,9 +742,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decima CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); @@ -823,26 +824,26 @@ private static IEnumerable MySqlEnumCopyFromTestCases { get { - yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(100, MysqlStringTypesCEnum.Big, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(10, null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, HashSet cSet) + public async Task TestCopyFrom(int batchSize, MysqlStringTypesCEnum? cEnum, HashSet cSet) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); diff --git a/end2end/EndToEndTests/MySqlConnectorTester.cs b/end2end/EndToEndTests/MySqlConnectorTester.cs index 9d580ca1..dfa6f154 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.cs @@ -15,7 +15,8 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateExtendedBios(); - await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlNumericTypes(); + await QuerySql.TruncateMysqlStringTypes(); await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); } diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 6f516cce..4ed009c2 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -333,8 +333,8 @@ void 
AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null, null, null, null)] public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); + var expected = new QuerySql.GetMysqlStringTypesRow { CChar = cChar, CNchar = cNchar, @@ -345,9 +345,9 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); @@ -365,8 +365,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null, null, null)] public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTinyint, short? cYear, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CBool = cBool, CBoolean = cBoolean, @@ -377,9 +377,9 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CBool, Is.EqualTo(y.CBool)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -437,8 +437,8 @@ public async Task TestMySqlTransactionRollback() [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CFloat = cFloat, CNumeric = cNumeric, @@ -448,9 +448,9 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -523,24 +523,24 @@ private static IEnumerable MySqlEnumTypesTestCases { get { - yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(MysqlStringTypesCEnum.Medium, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumTypesTestCases))] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, HashSet cSet) + public async Task TestMySqlStringTypes(MysqlStringTypesCEnum? 
cEnum, HashSet cSet) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CEnum = cEnum, CSet = cSet }); + var expected = new QuerySql.GetMysqlStringTypesRow { CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); Assert.That(x.CSet, Is.EqualTo(y.CSet)); @@ -552,7 +552,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { @@ -604,15 +605,15 @@ public async Task TestMySqlJsonDataType(string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); + var expected = new QuerySql.GetMysqlStringTypesRow { CJson = cParsedJson, CJsonStringOverride = cJson }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -629,16 +630,16 @@ public async Task TestJsonCopyFrom(int batchSize, string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CJson = cParsedJson }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); @@ -650,7 +651,7 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [Test] public void TestMySqlInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); } [Test] @@ -658,9 +659,9 @@ public void TestMySqlInvalidJson() [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = 
cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -672,9 +673,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -693,9 +694,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean, short? cTinyint, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CBool = cBool, @@ -707,9 +708,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBool, Is.EqualTo(y.CBool)); @@ -728,9 +729,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CFloat = cFloat, @@ -741,9 +742,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decima CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); @@ -823,26 +824,26 @@ private static IEnumerable MySqlEnumCopyFromTestCases { get { - yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(100, MysqlStringTypesCEnum.Big, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(10, null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, HashSet cSet) + public async Task TestCopyFrom(int batchSize, MysqlStringTypesCEnum? cEnum, HashSet cSet) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs index ff474438..4df7d463 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.cs @@ -15,7 +15,8 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateExtendedBios(); - await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlNumericTypes(); + await QuerySql.TruncateMysqlStringTypes(); await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 22d5bf26..687a2ea0 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs 
+++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -333,8 +333,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null, null, null, null)] public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); + var expected = new QuerySql.GetMysqlStringTypesRow { CChar = cChar, CNchar = cNchar, @@ -345,9 +345,9 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); @@ -365,8 +365,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null, null, null)] public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTinyint, short? cYear, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CBool = cBool, CBoolean = cBoolean, @@ -377,9 +377,9 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CBool, Is.EqualTo(y.CBool)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -437,8 +437,8 @@ public async Task TestMySqlTransactionRollback() [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CFloat = cFloat, CNumeric = cNumeric, @@ -448,9 +448,9 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -523,24 +523,24 @@ private static IEnumerable MySqlEnumTypesTestCases { get { - yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(MysqlStringTypesCEnum.Medium, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumTypesTestCases))] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, HashSet cSet) + public async Task TestMySqlStringTypes(MysqlStringTypesCEnum? 
cEnum, HashSet cSet) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CEnum = cEnum, CSet = cSet }); + var expected = new QuerySql.GetMysqlStringTypesRow { CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); Assert.That(x.CSet, Is.EqualTo(y.CSet)); @@ -552,7 +552,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { @@ -604,15 +605,15 @@ public async Task TestMySqlJsonDataType(string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); + var expected = new QuerySql.GetMysqlStringTypesRow { CJson = cParsedJson, CJsonStringOverride = cJson }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -629,16 +630,16 @@ public async Task TestJsonCopyFrom(int batchSize, string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CJson = cParsedJson }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); @@ -650,7 +651,7 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [Test] public void TestMySqlInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); } [Test] @@ -658,9 +659,9 @@ public void TestMySqlInvalidJson() [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = 
cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -672,9 +673,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -693,9 +694,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean, short? cTinyint, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CBool = cBool, @@ -707,9 +708,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBool, Is.EqualTo(y.CBool)); @@ -728,9 +729,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CFloat = cFloat, @@ -741,9 +742,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decima CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); @@ -823,26 +824,26 @@ private static IEnumerable MySqlEnumCopyFromTestCases { get { - yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(100, MysqlStringTypesCEnum.Big, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(10, null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, HashSet cSet) + public async Task TestCopyFrom(int batchSize, MysqlStringTypesCEnum? cEnum, HashSet cSet) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs index 9573c794..b0f68b1b 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.cs @@ -15,7 +15,8 @@ public async Task EmptyTestsTable() { await QuerySql.DeleteAllAuthors(); await QuerySql.TruncateExtendedBios(); - await QuerySql.TruncateMysqlTypes(); + await QuerySql.TruncateMysqlNumericTypes(); + await QuerySql.TruncateMysqlStringTypes(); await QuerySql.TruncateMysqlDatetimeTypes(); await QuerySql.TruncateMysqlBinaryTypes(); } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index ce9f5c14..4afc4535 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ 
b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -333,8 +333,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null, null, null, null)] public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }); + var expected = new QuerySql.GetMysqlStringTypesRow { CChar = cChar, CNchar = cNchar, @@ -345,9 +345,9 @@ public async Task TestMySqlStringTypes(string cChar, string cNchar, string cNati CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CNchar, Is.EqualTo(y.CNchar)); @@ -365,8 +365,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, null, null, null, null, null, null, null)] public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTinyint, short? cYear, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CBool = cBool, CBoolean = cBoolean, @@ -377,9 +377,9 @@ public async Task TestMySqlIntegerTypes(bool? cBool, bool? cBoolean, short? cTin CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CBool, Is.EqualTo(y.CBool)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -437,8 +437,8 @@ public async Task TestMySqlTransactionRollback() [TestCase(null, null, null, null, null, null, null)] public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }); + var expected = new QuerySql.GetMysqlNumericTypesRow { CFloat = cFloat, CNumeric = cNumeric, @@ -448,9 +448,9 @@ public async Task TestMySqlFloatingPointTypes(float? cFloat, decimal? cNumeric, CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlNumericTypesRow y) { Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -523,24 +523,24 @@ private static IEnumerable MySqlEnumTypesTestCases { get { - yield return new TestCaseData(MysqlTypesCEnum.Medium, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(MysqlStringTypesCEnum.Medium, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumTypesTestCases))] - public async Task TestMySqlStringTypes(MysqlTypesCEnum? cEnum, HashSet cSet) + public async Task TestMySqlStringTypes(MysqlStringTypesCEnum? 
cEnum, HashSet cSet) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CEnum = cEnum, CSet = cSet }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CEnum = cEnum, CSet = cSet }); + var expected = new QuerySql.GetMysqlStringTypesRow { CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); Assert.That(x.CSet, Is.EqualTo(y.CSet)); @@ -552,7 +552,8 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow [TestCase(null, null, "1971-01-01 00:00:00")] public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CInt = cInt, CVarchar = cVarchar }); + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); var expected = new QuerySql.GetMysqlFunctionsRow { @@ -604,15 +605,15 @@ public async Task TestMySqlJsonDataType(string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); - var expected = new QuerySql.GetMysqlTypesRow + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJson = cParsedJson, CJsonStringOverride = cJson }); + var expected = new QuerySql.GetMysqlStringTypesRow { CJson = cParsedJson, CJsonStringOverride = cJson }; - var actual = await QuerySql.GetMysqlTypes(); + var actual = await QuerySql.GetMysqlStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesRow x, QuerySql.GetMysqlTypesRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -629,16 +630,16 @@ public async Task TestJsonCopyFrom(int batchSize, string cJson) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CJson = cParsedJson }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); @@ -650,7 +651,7 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [Test] public void TestMySqlInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlTypes(new QuerySql.InsertMysqlTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); } [Test] @@ -658,9 +659,9 @@ public void TestMySqlInvalidJson() [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, string cNationalChar, string cVarchar, string cTinytext, string cMediumtext, string cText, string cLongtext) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = 
cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CChar = cChar, CNchar = cNchar, CNationalChar = cNationalChar, CVarchar = cVarchar, CTinytext = cTinytext, CMediumtext = cMediumtext, CText = cText, CLongtext = cLongtext }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -672,9 +673,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cNchar, CText = cText, CLongtext = cLongtext }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -693,9 +694,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean, short? cTinyint, short? cSmallint, int? cMediumint, int? cInt, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CBool = cBool, CBoolean = cBoolean, CTinyint = cTinyint, CSmallint = cSmallint, CMediumint = cMediumint, CInt = cInt, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CBool = cBool, @@ -707,9 +708,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBool, bool? cBoolean CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBool, Is.EqualTo(y.CBool)); @@ -728,9 +729,9 @@ void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypes [TestCase(10, null, null, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decimal? cNumeric, decimal? cDecimal, decimal? cDec, decimal? cFixed, double? cDouble, double? 
cDoublePrecision) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlNumericTypesBatchArgs { CFloat = cFloat, CNumeric = cNumeric, CDecimal = cDecimal, CDec = cDec, CFixed = cFixed, CDouble = cDouble, CDoublePrecision = cDoublePrecision }).ToList(); + await QuerySql.InsertMysqlNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlNumericTypesCntRow { Cnt = batchSize, CFloat = cFloat, @@ -741,9 +742,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cFloat, decima CDouble = cDouble, CDoublePrecision = cDoublePrecision }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlNumericTypesCntRow x, QuerySql.GetMysqlNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CFloat, Is.EqualTo(y.CFloat)); @@ -823,26 +824,26 @@ private static IEnumerable MySqlEnumCopyFromTestCases { get { - yield return new TestCaseData(100, MysqlTypesCEnum.Big, new HashSet { MysqlTypesCSet.Tea, MysqlTypesCSet.Coffee }).SetName("Valid Enum values"); + yield return new TestCaseData(100, MysqlStringTypesCEnum.Big, new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee }).SetName("Valid Enum values"); yield return new TestCaseData(10, null, null).SetName("Enum with null values"); } } [Test] [TestCaseSource(nameof(MySqlEnumCopyFromTestCases))] - public async Task TestCopyFrom(int batchSize, MysqlTypesCEnum? 
cEnum, HashSet cSet) + public async Task TestCopyFrom(int batchSize, MysqlStringTypesCEnum? cEnum, HashSet cSet) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); - await QuerySql.InsertMysqlTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CEnum = cEnum, CSet = cSet }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow { Cnt = batchSize, CEnum = cEnum, CSet = cSet }; - var actual = await QuerySql.GetMysqlTypesCnt(); + var actual = await QuerySql.GetMysqlStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlTypesCntRow x, QuerySql.GetMysqlTypesCntRow y) + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index a26a969a..d71efd9e 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -5,7 +5,7 @@ using System.Text.Json; namespace MySqlConnectorDapperExampleGen; -public class MysqlType +public class MysqlNumericType { public bool? CBool { get; init; } public bool? CBoolean { get; init; } @@ -22,6 +22,9 @@ public class MysqlType public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } +}; +public class MysqlStringType +{ public string? CChar { get; init; } public string? CNchar { get; init; } public string? CNationalChar { get; init; } @@ -32,8 +35,8 @@ public class MysqlType public string? CLongtext { get; init; } public JsonElement? 
CJson { get; init; } public JsonElement? CJsonStringOverride { get; init; } - public MysqlTypesCEnum? CEnum { get; init; } - public HashSet? CSet { get; init; } + public MysqlStringTypesCEnum? CEnum { get; init; } + public HashSet? CSet { get; init; } }; public class MysqlDatetimeType { @@ -73,7 +76,7 @@ public class ExtendedBio public ExtendedBiosBioType? BioType { get; init; } public HashSet? AuthorType { get; init; } }; -public enum MysqlTypesCEnum +public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value Small = 1, @@ -81,27 +84,27 @@ public enum MysqlTypesCEnum Big = 3 } -public static class MysqlTypesCEnumExtensions +public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCEnum.Invalid, - ["small"] = MysqlTypesCEnum.Small, - ["medium"] = MysqlTypesCEnum.Medium, - ["big"] = MysqlTypesCEnum.Big + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCEnumSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum MysqlTypesCSet +public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value Tea = 1, @@ -109,23 +112,23 @@ public enum MysqlTypesCSet Milk = 3 } -public static class MysqlTypesCSetExtensions +public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private 
static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCSet.Invalid, - ["tea"] = MysqlTypesCSet.Tea, - ["coffee"] = MysqlTypesCSet.Coffee, - ["milk"] = MysqlTypesCSet.Milk + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - public static MysqlTypesCSet ToMysqlTypesCSet(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCSetSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index beaf0e50..1fc85a4b 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -570,8 +570,8 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public class InsertMysqlTypesArgs + 
private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public class InsertMysqlNumericTypesArgs { public bool? CBool { get; init; } public bool? CBoolean { get; init; } @@ -588,20 +588,8 @@ public class InsertMysqlTypesArgs public double? CFloat { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } - public string? CChar { get; init; } - public string? CNchar { get; init; } - public string? CNationalChar { get; init; } - public string? CVarchar { get; init; } - public string? CTinytext { get; init; } - public string? CMediumtext { get; init; } - public string? CText { get; init; } - public string? CLongtext { get; init; } - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public MysqlTypesCEnum? CEnum { get; init; } - public HashSet? 
CSet { get; init; } }; - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { var queryParams = new Dictionary(); queryParams.Add("c_bool", args.CBool); @@ -619,31 +607,19 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_float", args.CFloat); queryParams.Add("c_double", args.CDouble); queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_nchar", args.CNchar); - queryParams.Add("c_national_char", args.CNationalChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_tinytext", args.CTinytext); - queryParams.Add("c_mediumtext", args.CMediumtext); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_longtext", args.CLongtext); - queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_enum", args.CEnum); - queryParams.Add("c_set", args.CSet != null ? string.Join(",", args.CSet) : null); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); + await connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams, transaction: this.Transaction); } - public class InsertMysqlTypesBatchArgs + public class InsertMysqlNumericTypesBatchArgs { public bool? CBool { get; init; } public bool? 
CBoolean { get; init; } @@ -660,20 +636,8 @@ public class InsertMysqlTypesBatchArgs public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } - public string? CChar { get; init; } - public string? CNchar { get; init; } - public string? CNationalChar { get; init; } - public string? CVarchar { get; init; } - public string? CTinytext { get; init; } - public string? CMediumtext { get; init; } - public string? CText { get; init; } - public string? CLongtext { get; init; } - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public MysqlTypesCEnum? CEnum { get; init; } - public HashSet? CSet { get; init; } }; - public async Task InsertMysqlTypesBatch(List args) + public async Task InsertMysqlNumericTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; var config = new CsvConfiguration(CultureInfo.CurrentCulture) @@ -696,16 +660,11 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - 
csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -715,7 +674,7 @@ public async Task InsertMysqlTypesBatch(List args) var loader = new MySqlBulkLoader(connection) { Local = true, - TableName = "mysql_types", + TableName = "mysql_numeric_types", FileName = "input.csv", FieldTerminator = ",", FieldQuotationCharacter = '"', @@ -723,14 +682,14 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; - public class GetMysqlTypesRow + private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public class GetMysqlNumericTypesRow { public bool? CBool { get; init; } public bool? 
CBoolean { get; init; } @@ -747,26 +706,14 @@ public class GetMysqlTypesRow public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } - public string? CChar { get; init; } - public string? CNchar { get; init; } - public string? CNationalChar { get; init; } - public string? CVarchar { get; init; } - public string? CTinytext { get; init; } - public string? CMediumtext { get; init; } - public string? CText { get; init; } - public string? CLongtext { get; init; } - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public MysqlTypesCEnum? CEnum { get; init; } - public HashSet? CSet { get; init; } }; - public async Task GetMysqlTypes() + public async Task GetMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql); return result; } } @@ -776,11 +723,11 @@ public class GetMysqlTypesRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, 
c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; - public class GetMysqlTypesCntRow + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + public class GetMysqlNumericTypesCntRow { public required long Cnt { get; init; } public bool? CBool { get; init; } @@ -798,6 +745,187 @@ public class GetMysqlTypesCntRow public decimal? CFixed { get; init; } public double? CDouble { get; init; } public double? CDoublePrecision { get; init; } + }; + public async Task GetMysqlNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); + } + + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlNumericTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlNumericTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public class InsertMysqlStringTypesArgs + { + public string? CChar { get; init; } + public string? CNchar { get; init; } + public string? CNationalChar { get; init; } + public string? CVarchar { get; init; } + public string? CTinytext { get; init; } + public string? CMediumtext { get; init; } + public string? CText { get; init; } + public string? CLongtext { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public MysqlStringTypesCEnum? CEnum { get; init; } + public HashSet? CSet { get; init; } + }; + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_nchar", args.CNchar); + queryParams.Add("c_national_char", args.CNationalChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_tinytext", args.CTinytext); + queryParams.Add("c_mediumtext", args.CMediumtext); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_longtext", args.CLongtext); + queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_enum", args.CEnum); + queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlStringTypesBatchArgs + { + public string? CChar { get; init; } + public string? CNchar { get; init; } + public string? CNationalChar { get; init; } + public string? CVarchar { get; init; } + public string? CTinytext { get; init; } + public string? CMediumtext { get; init; } + public string? CText { get; init; } + public string? CLongtext { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public MysqlStringTypesCEnum? CEnum { get; init; } + public HashSet? 
CSet { get; init; } + }; + public async Task InsertMysqlStringTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, 
c_set FROM mysql_string_types LIMIT 1"; + public class GetMysqlStringTypesRow + { + public string? CChar { get; init; } + public string? CNchar { get; init; } + public string? CNationalChar { get; init; } + public string? CVarchar { get; init; } + public string? CTinytext { get; init; } + public string? CMediumtext { get; init; } + public string? CText { get; init; } + public string? CLongtext { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public MysqlStringTypesCEnum? CEnum { get; init; } + public HashSet? CSet { get; init; } + }; + public async Task GetMysqlStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public class GetMysqlStringTypesCntRow + { + public required long Cnt { get; init; } public string? CChar { get; init; } public string? CNchar { get; init; } public string? CNationalChar { get; init; } @@ -808,16 +936,16 @@ public class GetMysqlTypesCntRow public string? CLongtext { get; init; } public JsonElement? CJson { get; init; } public string? 
CJsonStringOverride { get; init; } - public MysqlTypesCEnum? CEnum { get; init; } - public HashSet? CSet { get; init; } + public MysqlStringTypesCEnum? CEnum { get; init; } + public HashSet? CSet { get; init; } }; - public async Task GetMysqlTypesCnt() + public async Task GetMysqlStringTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql); return result; } } @@ -827,22 +955,22 @@ public class GetMysqlTypesCntRow throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlTypesSql); + await connection.ExecuteAsync(TruncateMysqlStringTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlStringTypesSql, transaction: this.Transaction); } private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, 
c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; @@ -1168,7 +1296,7 @@ public async Task TruncateMysqlBinaryTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; public class GetMysqlFunctionsRow { public int? MaxInt { get; init; } diff --git a/examples/MySqlConnectorDapperExample/Utils.cs b/examples/MySqlConnectorDapperExample/Utils.cs index a34ee9ea..3486b103 100644 --- a/examples/MySqlConnectorDapperExample/Utils.cs +++ b/examples/MySqlConnectorDapperExample/Utils.cs @@ -30,7 +30,7 @@ public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlTypesCSetTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -54,28 +54,28 @@ public override void SetValue(IDbDataParameter parameter, HashSet> + private class MysqlStringTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlTypesCSetSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToMysqlStringTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to 
HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } } - public class MysqlTypesCSetCsvConverter : DefaultTypeConverter + public class MysqlStringTypesCSetCsvConverter : DefaultTypeConverter { public override string? ConvertToString(object? value, IWriterRow row, MemberMapData memberMapData) { if (value == null) return @"\N"; - if (value is HashSet setVal) + if (value is HashSet setVal) return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 77a79af4..2bbf9802 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -27,14 +27,14 @@ "tables": [ { "rel": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "columns": [ { "name": "c_bool", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -44,7 +44,7 @@ "name": "c_boolean", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -54,7 +54,7 @@ "name": "c_tinyint", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -64,7 +64,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -74,7 +74,7 @@ "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -84,7 +84,7 @@ "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -94,7 +94,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": 
"mysql_numeric_types" }, "type": { "name": "int" @@ -104,7 +104,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -114,7 +114,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -124,7 +124,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -134,7 +134,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -144,7 +144,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -154,7 +154,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -164,7 +164,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -174,17 +174,24 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_string_types" + }, + "columns": [ { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -194,7 +201,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -204,7 +211,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -214,7 +221,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -224,7 +231,7 @@ "name": "c_tinytext", "length": -1, 
"table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -234,7 +241,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -244,7 +251,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -254,7 +261,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -264,7 +271,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -274,7 +281,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -284,20 +291,20 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" } }, { "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" } } ] @@ -528,7 +535,7 @@ ], "enums": [ { - "name": "mysql_types_c_enum", + "name": "mysql_string_types_c_enum", "vals": [ "small", "medium", @@ -536,7 +543,7 @@ ] }, { - "name": "mysql_types_c_set", + "name": "mysql_string_types_c_set", "vals": [ "tea", "coffee", @@ -1473,8 +1480,8 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", + "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", "cmd": ":exec", "parameters": [ { @@ -1484,7 +1491,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1499,7 +1506,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1514,7 +1521,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1529,7 +1536,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1544,7 +1551,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1559,7 +1566,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1574,7 +1581,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1589,7 +1596,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1604,7 +1611,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1619,7 +1626,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1634,7 +1641,7 
@@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1649,7 +1656,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1664,7 +1671,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -1679,7 +1686,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1694,206 +1701,26 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, 
- "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } } ], "comments": [ - " Basic types " + " Numeric types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_types" + "name": "mysql_numeric_types" } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", + "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", "cmd": ":copyfrom", "parameters": [ { @@ -1903,7 +1730,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1918,7 +1745,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1933,7 +1760,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1948,7 +1775,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1963,7 +1790,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1978,7 +1805,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1993,7 +1820,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2008,7 +1835,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -2023,7 +1850,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -2038,7 +1865,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2053,7 +1880,7 @@ "length": 10, 
"table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2068,7 +1895,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2083,7 +1910,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2098,7 +1925,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -2113,265 +1940,85 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + 
"originalName": "c_tinyint" }, { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { 
- "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" }, { "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2382,7 +2029,7 @@ "name": "c_integer", "length": -1, "table": { - "name": 
"mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2393,7 +2040,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -2404,7 +2051,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -2415,7 +2062,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2426,7 +2073,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2437,7 +2084,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2448,7 +2095,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2459,7 +2106,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -2470,332 +2117,748 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_char", + "name": "cnt", + "notNull": true, 
"length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_char" + "originalName": "c_bool" }, { - "name": "c_nchar", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_nchar" + "originalName": "c_boolean" }, { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" }, { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" }, { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": "c_tinytext" + "originalName": "c_mediumint" }, { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" }, { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "c_text" + "originalName": "c_integer" }, { - "name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + 
"name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" }, { - "name": "c_json", + "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "float" }, - "originalName": "c_json" + "originalName": "c_float" }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json_string_override" + "originalName": "c_numeric" }, { - "name": "c_enum", - "length": 6, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "decimal" }, - "originalName": "c_enum" + "originalName": "c_decimal" }, { - "name": "c_set", - "length": 15, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", 
- "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + "originalName": "c_dec" }, { - "name": "c_bool", - "length": 1, + "name": "c_fixed", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "decimal" }, - "originalName": "c_bool" + "originalName": "c_fixed" }, { - "name": "c_boolean", - "length": 1, + "name": "c_double", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_boolean" + "originalName": "c_double" }, { - "name": "c_tinyint", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_tinyint" + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } }, { - "name": "c_smallint", + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": 
"mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + 
"name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + 
"table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "smallint" + "name": "char" }, - "originalName": "c_smallint" + "originalName": "c_char" }, { - "name": "c_mediumint", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": 
"mediumint" + "name": "char" }, - "originalName": "c_mediumint" + "originalName": "c_nchar" }, { - "name": "c_int", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "char" }, - "originalName": "c_int" + "originalName": "c_national_char" }, { - "name": "c_integer", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "varchar" }, - "originalName": "c_integer" + "originalName": "c_varchar" }, { - "name": "c_bigint", + "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "tinytext" }, - "originalName": "c_bigint" + "originalName": "c_tinytext" }, { - "name": "c_float", + "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "float" + "name": "mediumtext" }, - "originalName": "c_float" + "originalName": "c_mediumtext" }, { - "name": "c_numeric", - "length": 10, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" + "originalName": "c_text" }, { - "name": "c_decimal", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "longtext" }, - "originalName": "c_decimal" + "originalName": "c_longtext" }, { - "name": "c_dec", - "length": 10, + "name": "c_json", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_dec" + "originalName": "c_json" }, { - "name": "c_fixed", - "length": 10, + "name": "c_json_string_override", + "length": -1, "table": { - "name": "mysql_types" + 
"name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_fixed" + "originalName": "c_json_string_override" }, { - "name": "c_double", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_double" + "originalName": "c_enum" }, { - "name": "c_double_precision", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_set" }, - "originalName": "c_double_precision" + "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } }, { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2806,7 +2869,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2817,7 +2880,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2828,7 +2891,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -2839,7 +2902,7 @@ "name": "c_tinytext", "length": -1, 
"table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -2850,7 +2913,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -2861,7 +2924,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -2872,7 +2935,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -2883,7 +2946,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2894,7 +2957,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2905,10 +2968,10 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" }, "originalName": "c_enum" }, @@ -2916,10 +2979,10 @@ "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" }, "originalName": "c_set" } @@ -2927,8 +2990,8 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -3669,7 +3732,7 @@ "filename": "query.sql" }, { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN 
mysql_datetime_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 10562c82be10ed0832a7314d5d3ab53f0071f9f9..02755123bd9cc3901f98234c191ef6ef31f0692f 100644 GIT binary patch literal 25756 zcmeHQOK%*<5za2j)v_sB@<_Jj@oPMKm>g4lS(YW*dT`05Oq&mj4?mC?%yM@`t-ai( zW|p!@K|q{?k;DNEB$phVOMn15#BmG*1_C5OkN`n0Nsh_ExBP(I0wfows$bJH+cP^9 zB}a-$2-V zT=L6T+_lmG()H@;N*m294Ns-CpGu~@y5V}8DAGQrCuEP7WGlbxBu6FX%;v(?+IXX0 zcgxL&H&I$&cfBI3fsrfwS4!Tx`R|S!cEw%USeK(XZdI%Z=K`S{?W*HfG{GPegfnIGGEaa$7lA1v-5D(}zvVaT%k0_9LT3qL!@7`VeiPJF z@>bpEqzCQuPQ!Z}bX|9wrw4{W*oN;i8(^(@(>X+pfx$}EZw^j3%B9-;23U$`y!Vii zTi;lzRm=8=4~Rqa;~aUwJV@<*xxTUHdet&*BH!pIx$^Q#qfr~m%L(=%}Hp^g+!7$u~_%uXaUMzGTL?eUC~ z{lHT~_gboL^LYbkm)vy@Rp#}DgI>)^j5FBhu#-03>mlRPb3&IB#Ku{&S9G~_%zI73 zx*U=-Wu{{UWl|Pe*jl5$nvM+$D~Q?C&{wBwIwUm4$b)+Ko?gURG#D2;$xtq)DIPH& zZZ))J#$-H?{n2-uL;GX6P4lpp8+gngWyyo)A$Ge0@e6|FpGI3Gt3O(m$n*?}vUAv%ExT(~4N9IO+bf^&&`3fd7+nBVCL zI`~SpUh+1%VveK2SE|jSe4ucZUOvlXoB@&z8$`J5I9A6v9 zNw!)!oKfo=++7Lc;c?Q5fHsy=IE*6%nyV2bjuV+5We9{!z(H^O#?jUShBPgICf%%F%h5VX(x66tqzABe*{iOj+gD30iMd>RkR-T_E{ceZ9CNe=RDabzL1Mjx z(b05EFr;W)zU;b2=UTP)wmnStfzo{F2MyXSjD2Gp%?6yUdZqO%n?`p*9_D7;wMNym z2gm`+!t0CC(#Qc944RBwu~LOWj6oK?Qr)kW;Mo2=W8@(Qxb=!#G5?w&2b9xJlmtDj zhYQ1ohDg>zg4k35nunweU(hK+6*Vlma3ouiGq zYdftrsp}M5&~@y7onTWOdaQ~xyi|e;-Yol=$VG`4pWS8cEPZ-zIj)u<_M_f3DQ-ou2q{M zhlB>oM+nZ*eAci?4!RkH<~S-8clD=p!%#=$EKHx3s&#> zehdHg8GFY)7a}jWpsrE(x?6TmK^aF{&FFqPNE9A8aB^r)&S@z2NnNvRI76o8oXmH? 
zs4D7x6WEYcQ9vOuTJ!H2azd*M*|EZU=razfN|<+>8(w{?V)jsF)JlGnRxNYORmey3##)FD#X2JQsQff9`uYNaqjnkl{lVmSi@bWqs?tjo1?qW)_Q~z@q900*kSC!mYVckFX<$euf;>Jl8nDA5^OaZW`T>+QJ`X$k5U& z6Jt>35=*KQbnh4(p{%^932R$-LxNjjdy(R*dlHQw3pt| zm>*`zxwbYJbit`vt9Q^@n4TIhj#ewaGYCF+zqB_W`{G>{o}K|i8zdc2)KJ(7TbRV5 zS2F1L)du@dmb|$$^;l!G6Ab34x&f5Mbk=u*-1@P?2-7 zqnQ?FX-!+f07$7-L5yh3pJd5r8E#zM8w?=65yxsh{Q>X_fTD4jMv2%FBVosk4HP2C zlo6E`!kX4gb6|^<%bJ^&&!Z7GBFhSJsah=KAcdpS)fKQ@BEURJ1fEiS`pVb5z^B`f z5vJ)dfzXViE%UQ1`C~*$!>X2s2SREZW_sdf9Z@Mq!jvOO8CDDD8fN<8XF@$AimKQ{ z!DY+mU?I5s7E<8Z)(ZhewPGBEu0FA`4l@*rJLgzFh0@xSsEYffgM1oMoT`nmGInNN zVJ)UNE)nmjQKJSi2pWe}hlqwX3wMNVZ39`|snK8?<8>GS)VLX8-eLEmwi#sRURNES zND^T4_*O5oEXa+c|6~8d4LYC~UW1;5J)#&I-Ov1DktZGs)dnb)j*relkUS&D`Y5Q@f?zJ*ISZqsGuuo?Y{}e(Gnwx+3bMG{NlnOdBa@=|LQ8tZgzh5( ze_FLO7#eOf2G&I@g+?$BgPl)SJ-?~6NMUmkO2$Vsx85Z7cACS!^`@|ILoS8z`AwFn9g)o= zgyxSe(n~D(W3$5j$}!9ki&H-TD^A6HtvHucu;KzFOq4^MT@|O0$;6u$e-59sRL|ux zFQz=djFXPR8P221Gd5EgISnQbNuNa`ITq$llK<~B9-6BAuE!Qj%(7{Bv}~bm=K?hP z3pi&1cmpTkACnMFH&?Gr#BXsP@B&%z0-;Iy^g;qQkki5B1atMJRr z0O7@uJXDYwXC~DVHv=IiVg|4UA5^FG^ zvF>Ay)w(>PjVx#^#NZX~#^ zkzfiU!EN$-lMoCj-q@itmqB62!D;V~f>Qwk!cK$AJr)U4IR2k?2k9n`>^Z_mR1b#& z(ZivDYuSh4(13jgpEBS(>0GhMWrfy&&rqVHy zGJ~s?dH7=hoXwP-RIs-`25K<3JsQT_?HL1{&3FThor;%}+mms)Iz~#27qzEiv2=`} zSPX4Hr@_xLLPEINJ|BablbHjoY+umh~iXmG8)*bdXSm8HS5iU%{_+e_ZgMzCBtaD9(B5B5liE9zabqd_`N+^Zl6 zb(_(|J=}&Mj$skv_7dB$rb%c;(%EAOsiRw69+i1*&5jzJ2t)?VY2&YR z5pvq&h*(;-%%qbeIGxuZslAZ6k0zbK=uRA`2wCniM6kRSVWKcu62L%CikVe@=v;!t z_DtN~n`D82?S;u;u3Q{Lm2*(#*gZMPLgj*fy3#Bp1ggA;wJqLL7A&->xLCFZZHtUh zSa2P8=6oct262V^f8)>{j0H=)B)-y1X^*QEKVklgkk2~WXDz9R;5o7{Ye>l-M_q|>+~Q97z37ON?pb0Z zZ`G|tQ!&4gpbjYUz%m_F0E>pPXyOb0Q?}Ru}h0%X9kjLbZT1aErl?ck&xuLyi=aQvc)b~83F~R#UgEsg{TNB zfK|iakptEQuahPZ67zBq^S7Pk-AwC>IZ?+i%aR_8v!s?3#I8xzqy@6pS-E2-r2xP} zoB@3bdtdlwIM!Ej*oNjKe$h7Z7=P*O>%@sVpo`d>KrRmm!}lt1^SXMwRX-K07TE?$eg{dt7Yaxtr~xA7fx~jHQ&yxzQHS*o30?kH+kWr zeI^$+e{{g+kH~wylEr`vlKMluB2OUE*%?d*qHflJFR2#=6+d*!IK$ 
z5~8BOB~mewImPV>lQ}CYQYO5p;7c&4NasI=J#4GWf=NsUqDYv#VrHVjY%*`7T7~(3 zj(ns+Y%0y&)+yBKC01M7DO#}IZtuX2F1VRMIH<39?@*#aFjJvgy7{+Ga#g}l3;}>N z837(iXT>n5aQe(m;5F6NvzZC(lqxb+pC;dSCZ`${Yvv30P6Xav+keAe9TvJu@7(^K zjDaVI<;bcpPW5)ikoOLz1hB%Y8`GCM#wJ+~J)MY%msT@k=>#Nr=3q)7ohYb|&KoQC zW`39>KghPvc(=k1o@Wkhk6#ddTA~zCyf{|ukW-7|scU9cTa04pJ6;X}vt>3}vHkQQ zd$)UJf|)zDJ#mDJwu|T*fA9A05tr0d>l8_9Dh8ujyZDbR`F#dntA#J1qnqcNF=MFCmZRpcmZGEaC@_B&Z+YRia0in`} zPZZqxXC0b=te|GF#S1DI{>HvilaN#&kUWB2!D!vI{%?KAf V{bwE8l^H(k5dK65{rtnt{{jI;$2b50 literal 23841 zcmeHPOK%*<5zg|ZWmAl#CMC<`*Ld_I$24D-WNB?l=8{W=HXkxc*-iq7SniIfwU@ip z%&aU@FyQ2rAVC7e2#|wMIRuIateG&d`u4h54k0XfxxNi*YwQJ%q~eQmH`ri zwr8fgy1Tl%`s=FhCjaavnMvBcx$WJl8fTq*#m%j%Wp?JDt!@pg3bxP}8I^~^)<8C8M>r6Z$FIthU|GA5tR+J0dD|f2X^;*p-`E_@u zxV7cD1>OSMQ}S*X-7Vuko%gM>v)q7s$ zQ+MA6S=Su@(&#uaYj_UV0p6SM+Jl6Sj+HB(KQ>=46|2h)&=i;ccaZjMHP)+@lJ)+8 zOd4PGl76F)={xHhkDewyrM2~Xy*i#*4u4wjH4gce+BWnNJx6*ul~b&>CKS2GJ5xAL z5>i_8HjC9N@E`wD{xwyC7+Fb0&y&8=+NM*kG&a*S^%=d2j1H0R5-Md%A>BqNwsHhp z2?=Mzaa&W5k}SxivXRa@YxHm$;C`sIcB@)1rq$$-(alJ>#LG^p0wNkGF@hA|=qYR- zJfsq_{;M@M9k-HBc!pK7a?dHJ@?&i;*FnFW6hT4TISsu;C{&WsuWh-|Di6#>TRCmi zPaOLx)ICNQQqX6x^0hXJpCd+9iBEC_k-Z&0?=U!TWgL+FU zdXXHK>fIvOVS{k}Mmoww)FKCnh|#ioRqM5l7W7arNyYi2X-n?{q@X!+BHj$Pk#`Kc z+5s&c?}KC5@nxly5Wt=c@tgKh->$=3_^NN9m#B_qqacyM-fv_xBg0a|Md1^?q?5wglfZEF} zMjH|v?_~%~yaHs>v!tU^KAF+m8+=_4CYQ6M3sGGnr5yjK2y|CHft?^SKFko9hX543 zjrgFc0`i+y{OP#j6`d^n8Pdz!wH9rYA`MzJO!@$DmfXq~nrOYoidgWqhDe-s=w`Tq z_Hau{q1_wSS<-A5Xs7QK0ZG?y-Eth-wOOgYZB3AV5R(U*UuOe>_D?nZIy`N-#jV@h zv^TGkJqylez2aJ<Ri!(az=)f$P@MU7f$%?=wdpgXMx#14|!2bqc$2hrt0mX>D<%Vz$&OeBPh^$-W46oNQL3V`^S3sAie~@`2JC|HzPMV`ZTRR#*^iXauRi&)F~&!l#@sq7K_L= z!Q~^KqO1lVn9(T(*YRjGmR~9%xmUcVEHzL)|x~Y+((_{ z^)184ZiO1*sqcr#NEFOcurhXzD=^kjajZ?uG#2f13-x zc|$=04-h`5-2vhAl>J8b2wiQp&7dW}m~9Fm-lHI17{}3_1qs|>F^iNq^6}SxDSM2* z7`rv01p~R8!M>5DbUt;zLJLmOtq#zp0j@H_mIKyHU;(NsXi!x}60FG}w0N{#V>Nm?`v`BHoUGd5 zA!4|!Hqbhm6Y6r3*Co1xBB5PS7ov+nIyD#fY0E-L>stK;=P9(pQQk@_l}THJ46Ia_`?9^KnSUdzA#7% 
z4+JKqHv!Xu{|g?NdrNJ;$IsH|CHBnYD`7K55`Q$L)fTSeD>9aoVJl=2@^HZm>D_{) zT-;y=&8vl9(4z&v66RwX;kx{do&v%5%z*{J0C%~USa&GOt#7!cWdRS>aJLiR(lVlX zoWgK8#?xd?hP_P58DKppx{2oz9hG)&1J{6p%(|N7O^TG5;6<{g5he%hzqURs}so?isS8T$e5}VNA(KP`)=a9GCLwp!JHz`X@XSQA#d>7;Xp{ykrJV_07^r#3hmyh} zWoGA+j(S}p^#@5aF9}E*e|gQNAx$Ya!NbDKjCxm^%hbIp)^a-@l>{CMGd0*2;SOo@ z{EGCRfT}6aNuTUD`oe9Q#PaO}!Y5sTmHynm8lebVOJn zyssUs?Gj8enEn6(-T_#POr{dy9e}mQCIem!79W7M=t?Fv)U|)GmM2DJgOY{;D3#lj zG$%mw=A=)%$d58Ho0H^`VYUO%3w9~td1VQ%kVa4A0bu#8P7muZ5_j4R)00|2W(N3T z{?LAz>#$#D8Yjf);p!*V#+ld?+2~`%EI3c%uYDc1&oD+{(RZh8(A;EBP*Qb(43WLO+6`mGuC{n7e8wjm8jlXu0zloCpY^j#u zGGP5F)^gZhIWNez%V*UV{RKY~z-Kp^f%k88sk^^oq_UihRhITwZU_2=-3SxU9`tF2 zoQg#?;eMw^2;R(u26Z#cdB2DJJ`U#SSCj-G2ks$}+Q$jhJ{oYPM&6*m3$NQyB%ufH z?h3s^|0KRh1Vi$K#R+X9%d@LgLV)Np0HO=%@8g%MJg#7nxWYnjnz&pAw@3lCID)3% zvyz^h1tajj72GAs`P#1l`nevxeI=a@m>?xT#4GB-eih))LejM?=}d(e8YPN|N_K}` z00=b294PQbx6JA600}WxfQ0bZejRocuLKTleAhlR<=~0FWjh3Yb#HiepY`^tqY9ce<(P;d~sFIx-E7D+x956enAu=QR`0 zgZ1oIGH%^tYaF)f*2Uf2oE2Dd>sr)`#}}4py{$r$<&I~Rpp$TLOc-6=62`NLSZV8( zWD6b7AjLBC=18?^U6%xFtm&KaK@a(**w~T=kx}OaLKb=_o&wzyFXSwz$U)O&TGK;8 zE!(1_Xk8bh82W+NL+U=wclQk&9!uouSj4aEC_Nd%`?|4gM*Dy4 zARlDlDipjljE#YXsW*n$4jdN97IzEt0mDDR2*UuQ`YfJx616Z@MCy$`uzZ|%$@KEl z$_hXDAGd0y$E4<~8;?ay@F{XgR4*DAgy0n3(shdTA%Uo10?`@Lk3_11@nn;IrYc@P zC}ADCJ25)>{OGv-hpzkT%H|oj0+%LsHwedH3Y~K0Fdhcq)qT;CJBot~ zr1{Ooq1woMlJ|bMH1-{|47Mad CSet { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; public class MysqlDatetimeType { @@ -74,7 +77,7 @@ public class ExtendedBio public ExtendedBiosBioType? 
BioType { get; set; } public HashSet AuthorType { get; set; } }; - public enum MysqlTypesCEnum + public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value Small = 1, @@ -82,27 +85,27 @@ public enum MysqlTypesCEnum Big = 3 } - public static class MysqlTypesCEnumExtensions + public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCEnum.Invalid, - ["small"] = MysqlTypesCEnum.Small, - ["medium"] = MysqlTypesCEnum.Medium, - ["big"] = MysqlTypesCEnum.Big + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCEnumSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum MysqlTypesCSet + public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value Tea = 1, @@ -110,23 +113,23 @@ public enum MysqlTypesCSet Milk = 3 } - public static class MysqlTypesCSetExtensions + public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCSet.Invalid, - ["tea"] = MysqlTypesCSet.Tea, - ["coffee"] = MysqlTypesCSet.Coffee, - ["milk"] = MysqlTypesCSet.Milk + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = 
MysqlStringTypesCSet.Milk }; - public static MysqlTypesCSet ToMysqlTypesCSet(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCSetSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 433326a3..fc792791 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -45,8 +45,8 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public class InsertMysqlTypesArgs + private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, 
@c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public class InsertMysqlNumericTypesArgs { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -63,20 +63,8 @@ public class InsertMysqlTypesArgs public double? CFloat { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { var queryParams = new Dictionary(); queryParams.Add("c_bool", args.CBool); @@ -94,31 +82,19 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) queryParams.Add("c_float", args.CFloat); queryParams.Add("c_double", args.CDouble); queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_nchar", args.CNchar); - queryParams.Add("c_national_char", args.CNationalChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_tinytext", args.CTinytext); - queryParams.Add("c_mediumtext", args.CMediumtext); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_longtext", args.CLongtext); - queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_enum", args.CEnum); - queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertMysqlTypesSql, queryParams); + await connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams, transaction: this.Transaction); } - public class InsertMysqlTypesBatchArgs + public class InsertMysqlNumericTypesBatchArgs { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -135,20 +111,8 @@ public class InsertMysqlTypesBatchArgs public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task InsertMysqlTypesBatch(List args) + public async Task InsertMysqlNumericTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; var config = new CsvConfiguration(CultureInfo.CurrentCulture) @@ -171,15 +135,11 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -189,7 +149,7 @@ public async Task InsertMysqlTypesBatch(List args) var loader = new MySqlBulkLoader(connection) { Local = true, - TableName = "mysql_types", + TableName = "mysql_numeric_types", FileName = "input.csv", FieldTerminator = ",", FieldQuotationCharacter = '"', @@ -197,14 +157,14 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", 
"c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; - public class GetMysqlTypesRow + private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public class GetMysqlNumericTypesRow { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -221,26 +181,14 @@ public class GetMysqlTypesRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task GetMysqlTypes() + public async Task GetMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql); return result; } } @@ -250,11 +198,11 @@ public async Task GetMysqlTypes() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; - public class GetMysqlTypesCntRow + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + 
public class GetMysqlNumericTypesCntRow { public long Cnt { get; set; } public bool? CBool { get; set; } @@ -272,6 +220,186 @@ public class GetMysqlTypesCntRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } + }; + public async Task GetMysqlNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); + } + + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlNumericTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlNumericTypesSql, transaction: this.Transaction); + } + + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public class InsertMysqlStringTypesArgs + 
{ + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_nchar", args.CNchar); + queryParams.Add("c_national_char", args.CNationalChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_tinytext", args.CTinytext); + queryParams.Add("c_mediumtext", args.CMediumtext); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_longtext", args.CLongtext); + queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_enum", args.CEnum); + queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams, transaction: this.Transaction); + } + + public class InsertMysqlStringTypesBatchArgs + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? 
CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task InsertMysqlStringTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; + public class 
GetMysqlStringTypesRow + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task GetMysqlStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); + } + + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public class GetMysqlStringTypesCntRow + { + public long Cnt { get; set; } public string CChar { get; set; } public string CNchar { get; set; } public string CNationalChar { get; set; } @@ -282,16 +410,16 @@ public class GetMysqlTypesCntRow public string CLongtext { get; set; } public JsonElement? CJson { get; set; } public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task GetMysqlTypesCnt() + public async Task GetMysqlStringTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql); return result; } } @@ -301,22 +429,22 @@ public async Task GetMysqlTypesCnt() throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); } - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlTypesSql); + await connection.ExecuteAsync(TruncateMysqlStringTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlStringTypesSql, transaction: this.Transaction); } private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, 
@c_datetime, @c_timestamp, @c_time)"; @@ -641,7 +769,7 @@ public async Task TruncateMysqlBinaryTypes() await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; public class GetMysqlFunctionsRow { public int? MaxInt { get; set; } diff --git a/examples/MySqlConnectorDapperLegacyExample/Utils.cs b/examples/MySqlConnectorDapperLegacyExample/Utils.cs index 04937233..88c13183 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Utils.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Utils.cs @@ -30,7 +30,7 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlTypesCSetTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); } @@ -40,16 +40,16 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class MysqlTypesCSetTypeHandler : SqlMapper.TypeHandler> + private class MysqlStringTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlTypesCSetSet(); - throw new DataException($"Cannot convert {value?.GetType()} to 
HashSet"); + return s.ToMysqlStringTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } @@ -70,13 +70,13 @@ public override void SetValue(IDbDataParameter parameter, HashSet setVal) + if (value is HashSet setVal) return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index a17db6c2..5b6f3d38 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -27,14 +27,14 @@ "tables": [ { "rel": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "columns": [ { "name": "c_bool", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -44,7 +44,7 @@ "name": "c_boolean", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -54,7 +54,7 @@ "name": "c_tinyint", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -64,7 +64,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -74,7 +74,7 @@ "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -84,7 +84,7 @@ "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -94,7 +94,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -104,7 +104,7 @@ "name": 
"c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -114,7 +114,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -124,7 +124,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -134,7 +134,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -144,7 +144,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -154,7 +154,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -164,7 +164,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -174,17 +174,24 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_string_types" + }, + "columns": [ { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -194,7 +201,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -204,7 +211,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -214,7 +221,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -224,7 +231,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { 
"name": "tinytext" @@ -234,7 +241,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -244,7 +251,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -254,7 +261,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -264,7 +271,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -274,7 +281,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -284,20 +291,20 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" } }, { "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" } } ] @@ -528,7 +535,7 @@ ], "enums": [ { - "name": "mysql_types_c_enum", + "name": "mysql_string_types_c_enum", "vals": [ "small", "medium", @@ -536,7 +543,7 @@ ] }, { - "name": "mysql_types_c_set", + "name": "mysql_string_types_c_set", "vals": [ "tea", "coffee", @@ -622,8 +629,8 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", + "text": 
"\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", "cmd": ":exec", "parameters": [ { @@ -633,7 +640,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -648,7 +655,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -663,7 +670,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -678,7 +685,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -693,7 +700,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -708,7 +715,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -723,7 +730,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -738,7 +745,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -753,7 +760,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -768,7 +775,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -783,7 +790,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" 
}, "type": { "name": "decimal" @@ -798,7 +805,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -813,7 +820,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -828,7 +835,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -843,206 +850,26 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - 
"column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } } ], "comments": [ - " Basic types " + " Numeric types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_types" + "name": "mysql_numeric_types" } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", + "text": "INSERT INTO mysql_numeric_types \n(\n 
c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", "cmd": ":copyfrom", "parameters": [ { @@ -1052,7 +879,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1067,7 +894,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1082,7 +909,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1097,7 +924,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1112,7 +939,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1127,7 +954,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1142,7 +969,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1157,7 +984,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1172,7 +999,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -1187,7 +1014,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1202,7 +1029,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": 
"decimal" @@ -1217,7 +1044,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1232,7 +1059,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1247,7 +1074,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1262,265 +1089,85 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" }, { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - 
"table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - 
"originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" }, { "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1531,7 +1178,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1542,7 +1189,7 @@ "name": 
"c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1553,7 +1200,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -1564,7 +1211,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1575,7 +1222,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1586,7 +1233,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1597,7 +1244,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1608,7 +1255,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1619,332 +1266,748 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_char", + "name": "cnt", + "notNull": true, "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + 
"length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_char" + "originalName": "c_bool" }, { - "name": "c_nchar", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_nchar" + "originalName": "c_boolean" }, { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" }, { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" }, { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": "c_tinytext" + "originalName": "c_mediumint" }, { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" }, { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "c_text" + "originalName": "c_integer" }, { - "name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + "name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" }, { - "name": "c_json", 
+ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "float" }, - "originalName": "c_json" + "originalName": "c_float" }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json_string_override" + "originalName": "c_numeric" }, { - "name": "c_enum", - "length": 6, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "decimal" }, - "originalName": "c_enum" + "originalName": "c_decimal" }, { - "name": "c_set", - "length": 15, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + 
"originalName": "c_dec" }, { - "name": "c_bool", - "length": 1, + "name": "c_fixed", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "decimal" }, - "originalName": "c_bool" + "originalName": "c_fixed" }, { - "name": "c_boolean", - "length": 1, + "name": "c_double", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_boolean" + "originalName": "c_double" }, { - "name": "c_tinyint", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_tinyint" + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } }, { - "name": "c_smallint", + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": 
"c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": 
"mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": 
"mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "smallint" + "name": "char" }, - "originalName": "c_smallint" + "originalName": "c_char" }, { - "name": "c_mediumint", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumint" + "name": "char" }, - 
"originalName": "c_mediumint" + "originalName": "c_nchar" }, { - "name": "c_int", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "char" }, - "originalName": "c_int" + "originalName": "c_national_char" }, { - "name": "c_integer", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "varchar" }, - "originalName": "c_integer" + "originalName": "c_varchar" }, { - "name": "c_bigint", + "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "tinytext" }, - "originalName": "c_bigint" + "originalName": "c_tinytext" }, { - "name": "c_float", + "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "float" + "name": "mediumtext" }, - "originalName": "c_float" + "originalName": "c_mediumtext" }, { - "name": "c_numeric", - "length": 10, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" + "originalName": "c_text" }, { - "name": "c_decimal", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "longtext" }, - "originalName": "c_decimal" + "originalName": "c_longtext" }, { - "name": "c_dec", - "length": 10, + "name": "c_json", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_dec" + "originalName": "c_json" }, { - "name": "c_fixed", - "length": 10, + "name": "c_json_string_override", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, 
"type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_fixed" + "originalName": "c_json_string_override" }, { - "name": "c_double", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_double" + "originalName": "c_enum" }, { - "name": "c_double_precision", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_set" }, - "originalName": "c_double_precision" + "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } }, { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -1955,7 +2018,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -1966,7 +2029,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -1977,7 +2040,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -1988,7 +2051,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" 
+ "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -1999,7 +2062,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -2010,7 +2073,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -2021,7 +2084,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -2032,7 +2095,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2043,7 +2106,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2054,10 +2117,10 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" }, "originalName": "c_enum" }, @@ -2065,10 +2128,10 @@ "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" }, "originalName": "c_set" } @@ -2076,8 +2139,8 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -2818,7 +2881,7 @@ "filename": "query.sql" }, { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", "name": 
"GetMysqlFunctions", "cmd": ":one", "columns": [ diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index 93acf9db..dba6cf74 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.message +++ b/examples/MySqlConnectorDapperLegacyExample/request.message @@ -2,46 +2,47 @@ Г 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╘ *examples/MySqlConnectorDapperLegacyExamplecsharpЗ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorDapperLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner¤public"Уpublic· -  mysql_types$ -c_bool0R  mysql_typesb tinyint' - c_boolean0R  mysql_typesb tinyint' - c_tinyint0R  mysql_typesb tinyint2 - -c_smallint0         R  mysql_typesb -smallint4 - c_mediumint0         R  mysql_typesb  mediumint( -c_int0         R  mysql_typesbint, - c_integer0         R  mysql_typesbint. 
-c_bigint0         R  mysql_typesbbigint, -c_float0         R  mysql_typesbfloat' +./dist/LocalRunnerЖpublic"Ьpublicп +mysql_numeric_types, +c_bool0Rmysql_numeric_typesb tinyint/ + c_boolean0Rmysql_numeric_typesb tinyint/ + c_tinyint0Rmysql_numeric_typesb tinyint: + +c_smallint0         Rmysql_numeric_typesb +smallint< + c_mediumint0         Rmysql_numeric_typesb  mediumint0 +c_int0         Rmysql_numeric_typesbint4 + c_integer0         Rmysql_numeric_typesbint6 +c_bigint0         Rmysql_numeric_typesbbigint4 +c_float0         Rmysql_numeric_typesbfloat/ c_decimal0 -R  mysql_typesb decimal# +Rmysql_numeric_typesb decimal+ c_dec0 -R  mysql_typesb decimal' +Rmysql_numeric_typesb decimal/ c_numeric0 -R  mysql_typesb decimal% +Rmysql_numeric_typesb decimal- c_fixed0 -R  mysql_typesb decimal. -c_double0         R  mysql_typesbdouble8 -c_double_precision0         R  mysql_typesbdouble* -c_char0         R  mysql_typesbchar+ -c_nchar0         R  mysql_typesbchar3 -c_national_char0         R  mysql_typesbchar' - c_varchar0dR  mysql_typesb varchar2 - -c_tinytext0         R  mysql_typesb -tinytext6 - c_mediumtext0         R  mysql_typesb  -mediumtext* -c_text0         R  mysql_typesbtext2 - -c_longtext0         R  mysql_typesb -longtext* -c_json0         R  mysql_typesbjson: -c_json_string_override0         R  mysql_typesbjson/ -c_enum0R  mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_setШ +Rmysql_numeric_typesb decimal6 +c_double0         Rmysql_numeric_typesbdouble@ +c_double_precision0         Rmysql_numeric_typesbdouble├ +mysql_string_types1 +c_char0         Rmysql_string_typesbchar2 +c_nchar0         Rmysql_string_typesbchar: +c_national_char0         Rmysql_string_typesbchar. 
+ c_varchar0dRmysql_string_typesb varchar9 + +c_tinytext0         Rmysql_string_typesb +tinytext= + c_mediumtext0         Rmysql_string_typesb  +mediumtext1 +c_text0         Rmysql_string_typesbtext9 + +c_longtext0         Rmysql_string_typesb +longtext1 +c_json0         Rmysql_string_typesbjsonA +c_json_string_override0         Rmysql_string_typesbjson= +c_enum0Rmysql_string_typesbmysql_string_types_c_enum; +c_set0Rmysql_string_typesbmysql_string_types_c_setШ mysql_datetime_types3 c_year0         Rmysql_datetime_typesbyear3 c_date0         Rmysql_datetime_typesbdate2 @@ -73,9 +74,9 @@ c_longblob0 id0         Rbooksbbigint$ name0         Rbooksbtext+ author_id0         Rbooksbbigint) - description0         Rbooksbtext"( -mysql_types_c_enumsmallmediumbig"& -mysql_types_c_setteacoffeemilk"1 + description0         Rbooksbtext"/ +mysql_string_types_c_enumsmallmediumbig"- +mysql_string_types_c_setteacoffeemilk"1 bios_bio_type Autobiography BiographyMemoir". bios_author_typeAuthorEditor Translator"▄extended╧ @@ -83,9 +84,9 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_typeХ -ў -INSERT INTO mysql_types + author_type0Rextendedbiosbbios_author_type  +╜ +INSERT INTO mysql_numeric_types ( c_bool, c_boolean, @@ -95,64 +96,39 @@ INSERT INTO mysql_types c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, - c_char, - c_nchar, - c_national_char, - c_varchar, - c_tinytext, - c_mediumtext, - c_text, - c_longtext, - c_json, - c_json_string_override, - c_enum, - c_set + c_decimal, + c_dec, + c_numeric, + c_fixed, + c_float, + c_double, + c_double_precision ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz 
c_tinyint*JF +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlNumericTypes:exec*@< +c_bool0Rpublicmysql_numeric_typesb tinyintzc_bool*FB + c_boolean0Rpublicmysql_numeric_typesb tinyintz c_boolean*FB + c_tinyint0Rpublicmysql_numeric_typesb tinyintz c_tinyint*RN -c_smallint0         Rpublic mysql_typesb +c_smallint0         Rpublicmysql_numeric_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D@ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> : +c_smallint*UQ + c_mediumint0         Rpublicmysql_numeric_typesb  mediumintz c_mediumint*C? +c_int0         Rpublicmysql_numeric_typesbintzc_int*KG + c_integer0         Rpublicmysql_numeric_typesbintz c_integer*LH +c_bigint0         Rpublicmysql_numeric_typesbbigintzc_bigint*F B c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 -2 +Rpublicmysql_numeric_typesb decimalz c_decimal*> +: c_dec0 -Rpublic mysql_typesb decimalzc_dec*> : +Rpublicmysql_numeric_typesb decimalzc_dec*F B c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*: 6 +Rpublicmysql_numeric_typesb decimalz c_numeric*B > c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*A = -c_float0         Rpublic mysql_typesbfloatzc_float*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF - -c_tinytext0         Rpublic mysql_typesb -tinytextz -c_tinytext*PL - c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF - -c_longtext0         Rpublic mysql_typesb -longtextz -c_longtext*>: -c_json0         Rpublic 
mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? -c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set2 Basic types : query.sqlB  mysql_typesО -ЎINSERT INTO mysql_types +Rpublicmysql_numeric_typesb decimalzc_fixed*I E +c_float0         Rpublicmysql_numeric_typesbfloatzc_float*LH +c_double0         Rpublicmysql_numeric_typesbdoublezc_double*`\ +c_double_precision0         Rpublicmysql_numeric_typesbdoublezc_double_precision2 Numeric types : query.sqlBmysql_numeric_typesЎ +╝INSERT INTO mysql_numeric_types ( c_bool, c_boolean, @@ -162,106 +138,62 @@ c_longtext*>: c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, - c_nchar, - c_national_char, - c_varchar, - c_tinytext, - c_mediumtext, - c_text, - c_longtext, - c_json, - c_json_string_override, - c_enum, - c_set + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlNumericTypesBatch :copyfrom*@< +c_bool0Rpublicmysql_numeric_typesb tinyintzc_bool*FB + c_boolean0Rpublicmysql_numeric_typesb tinyintz c_boolean*FB + c_tinyint0Rpublicmysql_numeric_typesb tinyintz c_tinyint*RN -c_smallint0         Rpublic mysql_typesb +c_smallint0         Rpublicmysql_numeric_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? 
- c_integer0         Rpublic mysql_typesbintz c_integer*D@ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A = -c_float0         Rpublic mysql_typesbfloatzc_float*> -: +c_smallint*UQ + c_mediumint0         Rpublicmysql_numeric_typesb  mediumintz c_mediumint*C? +c_int0         Rpublicmysql_numeric_typesbintzc_int*KG + c_integer0         Rpublicmysql_numeric_typesbintz c_integer*LH +c_bigint0         Rpublicmysql_numeric_typesbbigintzc_bigint*I E +c_float0         Rpublicmysql_numeric_typesbfloatzc_float*F +B c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*> : +Rpublicmysql_numeric_typesb decimalz c_numeric*F B c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublicmysql_numeric_typesb decimalz c_decimal*> : c_dec0 -Rpublic mysql_typesb decimalzc_dec*: 6 +Rpublicmysql_numeric_typesb decimalzc_dec*B > c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF - -c_tinytext0         Rpublic mysql_typesb -tinytextz -c_tinytext*PL - c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +Rpublicmysql_numeric_typesb decimalzc_fixed*LH +c_double0         Rpublicmysql_numeric_typesbdoublezc_double*`\ +c_double_precision0         Rpublicmysql_numeric_typesbdoublezc_double_precision: query.sqlBmysql_numeric_typesи +╞SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1GetMysqlNumericTypes:one"4 +c_bool0Rmysql_numeric_typesb tinyintzc_bool": + c_boolean0Rmysql_numeric_typesb tinyintz 
c_boolean": + c_tinyint0Rmysql_numeric_typesb tinyintz c_tinyint"F -c_longtext0         Rpublic mysql_typesb -longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? -c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set: query.sqlB  mysql_typesА -╚SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", -c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"2 - c_tinyint0R  mysql_typesb tinyintz c_tinyint"> - -c_smallint0         R  mysql_typesb +c_smallint0         Rmysql_numeric_typesb smallintz -c_smallint"A - c_mediumint0         R  mysql_typesb  mediumintz c_mediumint"/ -c_int0         R  mysql_typesbintzc_int"7 - c_integer0         R  mysql_typesbintz c_integer"8 -c_bigint0         R  mysql_typesbbigintzc_bigint"5 -c_float0         R  mysql_typesbfloatzc_float"2 +c_smallint"I + c_mediumint0         Rmysql_numeric_typesb  mediumintz c_mediumint"7 +c_int0         Rmysql_numeric_typesbintzc_int"? + c_integer0         Rmysql_numeric_typesbintz c_integer"@ +c_bigint0         Rmysql_numeric_typesbbigintzc_bigint"= +c_float0         Rmysql_numeric_typesbfloatzc_float": c_decimal0 -R  mysql_typesb decimalz c_decimal"* +Rmysql_numeric_typesb decimalz c_decimal"2 c_dec0 -R  mysql_typesb decimalzc_dec"2 +Rmysql_numeric_typesb decimalzc_dec": c_numeric0 -R  mysql_typesb decimalz c_numeric". 
+Rmysql_numeric_typesb decimalz c_numeric"6 c_fixed0 -R  mysql_typesb decimalzc_fixed"8 -c_double0         R  mysql_typesbdoublezc_double"L -c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_char0         R  mysql_typesbcharzc_char"4 -c_nchar0         R  mysql_typesbcharzc_nchar"D -c_national_char0         R  mysql_typesbcharzc_national_char"2 - c_varchar0dR  mysql_typesb varcharz c_varchar"> - -c_tinytext0         R  mysql_typesb -tinytextz -c_tinytext"D - c_mediumtext0         R  mysql_typesb  -mediumtextz c_mediumtext"2 -c_text0         R  mysql_typesbtextzc_text"> - -c_longtext0         R  mysql_typesb -longtextz -c_longtext"2 -c_json0         R  mysql_typesbjsonzc_json"R -c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 -c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlк -╧SELECT +Rmysql_numeric_typesb decimalzc_fixed"@ +c_double0         Rmysql_numeric_typesbdoublezc_double"T +c_double_precision0         Rmysql_numeric_typesbdoublezc_double_precision: query.sqlЖ +БSELECT COUNT(*) AS cnt, c_bool, c_boolean, @@ -277,7 +209,147 @@ c_longtext"2 c_dec, c_fixed, c_double, - c_double_precision, + c_double_precision +FROM mysql_numeric_types +GROUP BY + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision +LIMIT 1GetMysqlNumericTypesCnt:one" +cnt0         @bbigint"4 +c_bool0Rmysql_numeric_typesb tinyintzc_bool": + c_boolean0Rmysql_numeric_typesb tinyintz c_boolean": + c_tinyint0Rmysql_numeric_typesb tinyintz c_tinyint"F + +c_smallint0         Rmysql_numeric_typesb +smallintz +c_smallint"I + c_mediumint0         Rmysql_numeric_typesb  mediumintz c_mediumint"7 +c_int0         Rmysql_numeric_typesbintzc_int"? 
+ c_integer0         Rmysql_numeric_typesbintz c_integer"@ +c_bigint0         Rmysql_numeric_typesbbigintzc_bigint"= +c_float0         Rmysql_numeric_typesbfloatzc_float": + c_numeric0 +Rmysql_numeric_typesb decimalz c_numeric": + c_decimal0 +Rmysql_numeric_typesb decimalz c_decimal"2 +c_dec0 +Rmysql_numeric_typesb decimalzc_dec"6 +c_fixed0 +Rmysql_numeric_typesb decimalzc_fixed"@ +c_double0         Rmysql_numeric_typesbdoublezc_double"T +c_double_precision0         Rmysql_numeric_typesbdoublezc_double_precision: query.sqlQ +"TRUNCATE TABLE mysql_numeric_typesTruncateMysqlNumericTypes:exec: query.sqlе + +Л +INSERT INTO mysql_string_types +( + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlStringTypes:exec*EA +c_char0         Rpublicmysql_string_typesbcharzc_char*GC +c_nchar0         Rpublicmysql_string_typesbcharzc_nchar*WS +c_national_char0         Rpublicmysql_string_typesbcharzc_national_char*EA + c_varchar0dRpublicmysql_string_typesb varcharz c_varchar*QM + +c_tinytext0         Rpublicmysql_string_typesb +tinytextz +c_tinytext*WS + c_mediumtext0         Rpublicmysql_string_typesb  +mediumtextz c_mediumtext*EA +c_text0         Rpublicmysql_string_typesbtextzc_text*QM + +c_longtext0         Rpublicmysql_string_typesb +longtextz +c_longtext*E A +c_json0         Rpublicmysql_string_typesbjsonzc_json*e +a +c_json_string_override0         Rpublicmysql_string_typesbjsonzc_json_string_override*Q M +c_enum0Rpublicmysql_string_typesbmysql_string_types_c_enumzc_enum*N J +c_set0Rpublicmysql_string_typesbmysql_string_types_c_setzc_set2 String types : query.sqlBmysql_string_typesЭ + +КINSERT INTO mysql_string_types +( + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set +) +VALUES (?, ?, ?, ?, ?, ?, ?, 
?, ?, ?, ?, ?)InsertMysqlStringTypesBatch :copyfrom*EA +c_char0         Rpublicmysql_string_typesbcharzc_char*GC +c_nchar0         Rpublicmysql_string_typesbcharzc_nchar*WS +c_national_char0         Rpublicmysql_string_typesbcharzc_national_char*EA + c_varchar0dRpublicmysql_string_typesb varcharz c_varchar*QM + +c_tinytext0         Rpublicmysql_string_typesb +tinytextz +c_tinytext*WS + c_mediumtext0         Rpublicmysql_string_typesb  +mediumtextz c_mediumtext*EA +c_text0         Rpublicmysql_string_typesbtextzc_text*QM + +c_longtext0         Rpublicmysql_string_typesb +longtextz +c_longtext*E A +c_json0         Rpublicmysql_string_typesbjsonzc_json*e +a +c_json_string_override0         Rpublicmysql_string_typesbjsonzc_json_string_override*Q M +c_enum0Rpublicmysql_string_typesbmysql_string_types_c_enumzc_enum*N J +c_set0Rpublicmysql_string_typesbmysql_string_types_c_setzc_set: query.sqlBmysql_string_typesП +пSELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1GetMysqlStringTypes:one"9 +c_char0         Rmysql_string_typesbcharzc_char"; +c_nchar0         Rmysql_string_typesbcharzc_nchar"K +c_national_char0         Rmysql_string_typesbcharzc_national_char"9 + c_varchar0dRmysql_string_typesb varcharz c_varchar"E + +c_tinytext0         Rmysql_string_typesb +tinytextz +c_tinytext"K + c_mediumtext0         Rmysql_string_typesb  +mediumtextz c_mediumtext"9 +c_text0         Rmysql_string_typesbtextzc_text"E + +c_longtext0         Rmysql_string_typesb +longtextz +c_longtext"9 +c_json0         Rmysql_string_typesbjsonzc_json"Y +c_json_string_override0         Rmysql_string_typesbjsonzc_json_string_override"E +c_enum0Rmysql_string_typesbmysql_string_types_c_enumzc_enum"B +c_set0Rmysql_string_typesbmysql_string_types_c_setzc_set: query.sql╣ + +╢SELECT + COUNT(*) AS cnt, c_char, c_nchar, c_national_char, @@ -290,17 +362,8 @@ c_longtext"2 
c_json_string_override, c_enum, c_set -FROM mysql_types +FROM mysql_string_types GROUP BY - c_bool, - c_boolean, - c_tinyint, - c_smallint, - c_mediumint, - c_int, - c_integer, - c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, @@ -313,50 +376,28 @@ GROUP BY c_json_string_override, c_enum, c_set -LIMIT 1GetMysqlTypesCnt:one" -cnt0         @bbigint", -c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"2 - c_tinyint0R  mysql_typesb tinyintz c_tinyint"> +LIMIT 1GetMysqlStringTypesCnt:one" +cnt0         @bbigint"9 +c_char0         Rmysql_string_typesbcharzc_char"; +c_nchar0         Rmysql_string_typesbcharzc_nchar"K +c_national_char0         Rmysql_string_typesbcharzc_national_char"9 + c_varchar0dRmysql_string_typesb varcharz c_varchar"E -c_smallint0         R  mysql_typesb -smallintz -c_smallint"A - c_mediumint0         R  mysql_typesb  mediumintz c_mediumint"/ -c_int0         R  mysql_typesbintzc_int"7 - c_integer0         R  mysql_typesbintz c_integer"8 -c_bigint0         R  mysql_typesbbigintzc_bigint"5 -c_float0         R  mysql_typesbfloatzc_float"2 - c_numeric0 -R  mysql_typesb decimalz c_numeric"2 - c_decimal0 -R  mysql_typesb decimalz c_decimal"* -c_dec0 -R  mysql_typesb decimalzc_dec". 
-c_fixed0 -R  mysql_typesb decimalzc_fixed"8 -c_double0         R  mysql_typesbdoublezc_double"L -c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_char0         R  mysql_typesbcharzc_char"4 -c_nchar0         R  mysql_typesbcharzc_nchar"D -c_national_char0         R  mysql_typesbcharzc_national_char"2 - c_varchar0dR  mysql_typesb varcharz c_varchar"> - -c_tinytext0         R  mysql_typesb +c_tinytext0         Rmysql_string_typesb tinytextz -c_tinytext"D - c_mediumtext0         R  mysql_typesb  -mediumtextz c_mediumtext"2 -c_text0         R  mysql_typesbtextzc_text"> +c_tinytext"K + c_mediumtext0         Rmysql_string_typesb  +mediumtextz c_mediumtext"9 +c_text0         Rmysql_string_typesbtextzc_text"E -c_longtext0         R  mysql_typesb +c_longtext0         Rmysql_string_typesb longtextz -c_longtext"2 -c_json0         R  mysql_typesbjsonzc_json"R -c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 -c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql╚ +c_longtext"9 +c_json0         Rmysql_string_typesbjsonzc_json"Y +c_json_string_override0         Rmysql_string_typesbjsonzc_json_string_override"E +c_enum0Rmysql_string_typesbmysql_string_types_c_enumzc_enum"B +c_set0Rmysql_string_typesbmysql_string_types_c_setzc_set: query.sqlO +!TRUNCATE TABLE mysql_string_typesTruncateMysqlStringTypes:exec: query.sql╚ В INSERT INTO mysql_datetime_types ( @@ -534,13 +575,14 @@ mediumblobz c_mediumblob"E c_longblob0         Rmysql_binary_typesb longblobz c_longblob: query.sqlO -!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sql╜ -Ь +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlу +┬ SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_types +FROM mysql_numeric_types +CROSS JOIN mysql_string_types CROSS JOIN 
mysql_datetime_typesGetMysqlFunctions:one" max_int0         @bany"# max_varchar0         @bany"% diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index d2ea81a0..d904f923 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -5,13 +5,14 @@ using System.Text.Json; namespace MySqlConnectorExampleGen; -public readonly record struct MysqlType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); +public readonly record struct MysqlNumericType(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision); +public readonly record struct MysqlStringType(string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, JsonElement? CJsonStringOverride, MysqlStringTypesCEnum? CEnum, HashSet? CSet); public readonly record struct MysqlDatetimeType(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? CTime); public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? 
Description); public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); -public enum MysqlTypesCEnum +public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value Small = 1, @@ -19,27 +20,27 @@ public enum MysqlTypesCEnum Big = 3 } -public static class MysqlTypesCEnumExtensions +public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCEnum.Invalid, - ["small"] = MysqlTypesCEnum.Small, - ["medium"] = MysqlTypesCEnum.Medium, - ["big"] = MysqlTypesCEnum.Big + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCEnumSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum MysqlTypesCSet +public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value Tea = 1, @@ -47,23 +48,23 @@ public enum MysqlTypesCSet Milk = 3 } -public static class MysqlTypesCSetExtensions +public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCSet.Invalid, - ["tea"] = MysqlTypesCSet.Tea, - ["coffee"] = MysqlTypesCSet.Coffee, - ["milk"] = MysqlTypesCSet.Milk + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = 
MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - public static MysqlTypesCSet ToMysqlTypesCSet(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCSetSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index cccb9210..aeb9959e 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -759,16 +759,16 @@ public async Task TruncateExtendedBios() } } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public readonly record struct InsertMysqlTypesArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? 
CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public readonly record struct InsertMysqlNumericTypesArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CFloat, double? CDouble, double? CDoublePrecision); + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) + using (var command = new MySqlCommand(InsertMysqlNumericTypesSql, connection)) { command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); @@ -785,18 +785,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -808,7 +796,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlTypesSql; + command.CommandText = InsertMysqlNumericTypesSql; command.Transaction = this.Transaction; command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); @@ -825,24 +813,12 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public readonly record struct InsertMysqlTypesBatchArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); - public async Task InsertMysqlTypesBatch(List args) + public readonly record struct InsertMysqlNumericTypesBatchArgs(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? 
CDoublePrecision); + public async Task InsertMysqlNumericTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; var config = new CsvConfiguration(CultureInfo.CurrentCulture) @@ -865,16 +841,11 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -884,7 +855,7 @@ public async Task InsertMysqlTypesBatch(List args) var loader = new MySqlBulkLoader(connection) { Local = true, - TableName = "mysql_types", + TableName = "mysql_numeric_types", FileName = "input.csv", FieldTerminator = ",", FieldQuotationCharacter = '"', @@ -892,28 +863,28 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", 
"c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; - public readonly record struct GetMysqlTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); - public async Task GetMysqlTypes() + private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public readonly record struct GetMysqlNumericTypesRow(bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CDecimal, decimal? CDec, decimal? CNumeric, decimal? CFixed, double? CDouble, double? 
CDoublePrecision); + public async Task GetMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesSql, connection)) + using (var command = new MySqlCommand(GetMysqlNumericTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesRow + return new GetMysqlNumericTypesRow { CBool = reader.IsDBNull(0) ? null : reader.GetBoolean(0), CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), @@ -929,19 +900,7 @@ public async Task InsertMysqlTypesBatch(List args) CNumeric = reader.IsDBNull(11) ? null : reader.GetDecimal(11), CFixed = reader.IsDBNull(12) ? null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), - CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), - CText = reader.IsDBNull(21) ? null : reader.GetString(21), - CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CJson = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), - CEnum = reader.IsDBNull(25) ? null : reader.GetString(25).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(14) ? 
null : reader.GetDouble(14) }; } } @@ -958,13 +917,13 @@ public async Task InsertMysqlTypesBatch(List args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesSql; + command.CommandText = GetMysqlNumericTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesRow + return new GetMysqlNumericTypesRow { CBool = reader.IsDBNull(0) ? null : reader.GetBoolean(0), CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), @@ -980,19 +939,7 @@ public async Task InsertMysqlTypesBatch(List args) CNumeric = reader.IsDBNull(11) ? null : reader.GetDecimal(11), CFixed = reader.IsDBNull(12) ? null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), - CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), - CText = reader.IsDBNull(21) ? null : reader.GetString(21), - CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CJson = reader.IsDBNull(23) ? null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), - CEnum = reader.IsDBNull(25) ? null : reader.GetString(25).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(14) ? 
null : reader.GetDouble(14) }; } } @@ -1001,22 +948,22 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; - public readonly record struct GetMysqlTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision, string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlTypesCEnum? CEnum, HashSet? CSet); - public async Task GetMysqlTypesCnt() + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + public readonly record struct GetMysqlNumericTypesCntRow(long Cnt, bool? CBool, bool? CBoolean, short? CTinyint, short? CSmallint, int? 
CMediumint, int? CInt, int? CInteger, long? CBigint, double? CFloat, decimal? CNumeric, decimal? CDecimal, decimal? CDec, decimal? CFixed, double? CDouble, double? CDoublePrecision); + public async Task GetMysqlNumericTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesCntSql, connection)) + using (var command = new MySqlCommand(GetMysqlNumericTypesCntSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesCntRow + return new GetMysqlNumericTypesCntRow { Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? null : reader.GetBoolean(1), @@ -1033,19 +980,7 @@ public async Task InsertMysqlTypesBatch(List args) CDec = reader.IsDBNull(12) ? null : reader.GetDecimal(12), CFixed = reader.IsDBNull(13) ? null : reader.GetDecimal(13), CDouble = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? null : reader.GetDouble(15), - CChar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), - CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), - CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), - CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), - CText = reader.IsDBNull(22) ? null : reader.GetString(22), - CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), - CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), - CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), - CEnum = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(15) ? 
null : reader.GetDouble(15) }; } } @@ -1062,13 +997,13 @@ public async Task InsertMysqlTypesBatch(List args) using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesCntSql; + command.CommandText = GetMysqlNumericTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesCntRow + return new GetMysqlNumericTypesCntRow { Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? null : reader.GetBoolean(1), @@ -1085,19 +1020,284 @@ public async Task InsertMysqlTypesBatch(List args) CDec = reader.IsDBNull(12) ? null : reader.GetDecimal(12), CFixed = reader.IsDBNull(13) ? null : reader.GetDecimal(13), CDouble = reader.IsDBNull(14) ? null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? null : reader.GetDouble(15), - CChar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), - CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), - CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), - CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), - CText = reader.IsDBNull(22) ? null : reader.GetString(22), - CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), - CJson = reader.IsDBNull(24) ? null : JsonSerializer.Deserialize(reader.GetString(24)), - CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), - CEnum = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(15) ? 
null : reader.GetDouble(15) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlNumericTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlNumericTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public readonly record struct InsertMysqlStringTypesArgs(string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlStringTypesCEnum? CEnum, HashSet? CSet); + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(InsertMysqlStringTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_char", args.CChar ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlStringTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public readonly record struct InsertMysqlStringTypesBatchArgs(string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlStringTypesCEnum? CEnum, HashSet? 
CSet); + public async Task InsertMysqlStringTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter?>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM 
mysql_string_types LIMIT 1"; + public readonly record struct GetMysqlStringTypesRow(string? CChar, string? CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlStringTypesCEnum? CEnum, HashSet? CSet); + public async Task GetMysqlStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlStringTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? 
null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlStringTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } + } + + return null; + } + + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public readonly record struct GetMysqlStringTypesCntRow(long Cnt, string? CChar, string? 
CNchar, string? CNationalChar, string? CVarchar, string? CTinytext, string? CMediumtext, string? CText, string? CLongtext, JsonElement? CJson, string? CJsonStringOverride, MysqlStringTypesCEnum? CEnum, HashSet? CSet); + public async Task GetMysqlStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlStringTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesCntRow + { + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlStringTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesCntRow + { + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() }; } } @@ -1106,15 +1306,15 @@ public async Task InsertMysqlTypesBatch(List args) return null; } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlStringTypesSql, connection)) { await command.ExecuteNonQueryAsync(); } @@ -1127,7 +1327,7 @@ public async Task TruncateMysqlTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlTypesSql; + command.CommandText = TruncateMysqlStringTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } @@ -1612,7 +1812,7 @@ public async Task TruncateMysqlBinaryTypes() } } - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; public readonly record struct GetMysqlFunctionsRow(int? MaxInt, string? 
MaxVarchar, DateTime MaxTimestamp); public async Task GetMysqlFunctions() { diff --git a/examples/MySqlConnectorExample/Utils.cs b/examples/MySqlConnectorExample/Utils.cs index 16c3cd16..d16b3f61 100644 --- a/examples/MySqlConnectorExample/Utils.cs +++ b/examples/MySqlConnectorExample/Utils.cs @@ -14,13 +14,13 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - public class MysqlTypesCSetCsvConverter : DefaultTypeConverter + public class MysqlStringTypesCSetCsvConverter : DefaultTypeConverter { public override string? ConvertToString(object? value, IWriterRow row, MemberMapData memberMapData) { if (value == null) return @"\N"; - if (value is HashSet setVal) + if (value is HashSet setVal) return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index 18c77bee..2e19af32 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -27,14 +27,14 @@ "tables": [ { "rel": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "columns": [ { "name": "c_bool", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -44,7 +44,7 @@ "name": "c_boolean", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -54,7 +54,7 @@ "name": "c_tinyint", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -64,7 +64,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -74,7 +74,7 @@ "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": 
"mediumint" @@ -84,7 +84,7 @@ "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -94,7 +94,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -104,7 +104,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -114,7 +114,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -124,7 +124,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -134,7 +134,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -144,7 +144,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -154,7 +154,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -164,7 +164,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -174,17 +174,24 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_string_types" + }, + "columns": [ { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -194,7 +201,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -204,7 +211,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": 
"mysql_string_types" }, "type": { "name": "char" @@ -214,7 +221,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -224,7 +231,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -234,7 +241,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -244,7 +251,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -254,7 +261,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -264,7 +271,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -274,7 +281,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -284,20 +291,20 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" } }, { "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" } } ] @@ -528,7 +535,7 @@ ], "enums": [ { - "name": "mysql_types_c_enum", + "name": "mysql_string_types_c_enum", "vals": [ "small", "medium", @@ -536,7 +543,7 @@ ] }, { - "name": "mysql_types_c_set", + "name": "mysql_string_types_c_set", "vals": [ "tea", "coffee", @@ -1473,8 +1480,8 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, 
c_fixed, c_float, c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", + "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", "cmd": ":exec", "parameters": [ { @@ -1484,7 +1491,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1499,7 +1506,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1514,7 +1521,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1529,7 +1536,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1544,7 +1551,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1559,7 +1566,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1574,7 +1581,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1589,7 +1596,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1604,7 +1611,7 @@ "length": 10, "table": { "schema": 
"public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1619,7 +1626,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1634,7 +1641,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1649,7 +1656,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1664,7 +1671,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -1679,7 +1686,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1694,206 +1701,26 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { 
- "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } } ], "comments": [ - " Basic types " + " Numeric types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_types" + "name": "mysql_numeric_types" } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, 
c_double, c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", + "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", "cmd": ":copyfrom", "parameters": [ { @@ -1903,7 +1730,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1918,7 +1745,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1933,7 +1760,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1948,7 +1775,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1963,7 +1790,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1978,7 +1805,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1993,7 +1820,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2008,7 +1835,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -2023,7 +1850,7 @@ "length": -1, "table": { "schema": "public", - 
"name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -2038,7 +1865,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2053,7 +1880,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2068,7 +1895,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2083,7 +1910,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2098,7 +1925,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -2113,265 +1940,85 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 17, - "column": { - "name": "c_nchar", - 
"length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" }, { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": 
"c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": 
{ + "name": "mediumint" + }, + "originalName": "c_mediumint" }, { "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2382,7 +2029,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -2393,7 +2040,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -2404,7 +2051,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -2415,7 +2062,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2426,7 +2073,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2437,7 +2084,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2448,7 +2095,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -2459,7 +2106,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -2470,332 +2117,748 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n 
c_int,\n c_integer,\n c_bigint,\n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_char", + "name": "cnt", + "notNull": true, "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_char" + "originalName": "c_bool" }, { - "name": "c_nchar", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_nchar" + "originalName": "c_boolean" }, { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" }, { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" }, { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": "c_tinytext" + "originalName": "c_mediumint" }, { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" }, { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": 
{ - "name": "text" + "name": "int" }, - "originalName": "c_text" + "originalName": "c_integer" }, { - "name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + "name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" }, { - "name": "c_json", + "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "float" }, - "originalName": "c_json" + "originalName": "c_float" }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json_string_override" + "originalName": "c_numeric" }, { - "name": "c_enum", - "length": 6, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "decimal" }, - "originalName": "c_enum" + "originalName": "c_decimal" }, { - "name": "c_set", - "length": 15, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, 
c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + "originalName": "c_dec" }, { - "name": "c_bool", - "length": 1, + "name": "c_fixed", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "decimal" }, - "originalName": "c_bool" + "originalName": "c_fixed" }, { - "name": "c_boolean", - "length": 1, + "name": "c_double", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_boolean" + "originalName": "c_double" }, { - "name": "c_tinyint", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_tinyint" + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } }, { - "name": "c_smallint", + "number": 2, + "column": { + "name": "c_nchar", + 
"length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + 
"length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + 
"column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": 
"mysql_string_types" }, "type": { - "name": "smallint" + "name": "char" }, - "originalName": "c_smallint" + "originalName": "c_char" }, { - "name": "c_mediumint", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumint" + "name": "char" }, - "originalName": "c_mediumint" + "originalName": "c_nchar" }, { - "name": "c_int", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "char" }, - "originalName": "c_int" + "originalName": "c_national_char" }, { - "name": "c_integer", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "varchar" }, - "originalName": "c_integer" + "originalName": "c_varchar" }, { - "name": "c_bigint", + "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "tinytext" }, - "originalName": "c_bigint" + "originalName": "c_tinytext" }, { - "name": "c_float", + "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "float" + "name": "mediumtext" }, - "originalName": "c_float" + "originalName": "c_mediumtext" }, { - "name": "c_numeric", - "length": 10, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" + "originalName": "c_text" }, { - "name": "c_decimal", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "longtext" }, - "originalName": "c_decimal" + "originalName": "c_longtext" }, { - "name": "c_dec", - "length": 10, + "name": "c_json", + "length": -1, "table": { - 
"name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_dec" + "originalName": "c_json" }, { - "name": "c_fixed", - "length": 10, + "name": "c_json_string_override", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_fixed" + "originalName": "c_json_string_override" }, { - "name": "c_double", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_double" + "originalName": "c_enum" }, { - "name": "c_double_precision", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_set" }, - "originalName": "c_double_precision" + "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } }, { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2806,7 +2869,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2817,7 +2880,7 @@ "name": "c_national_char", "length": -1, "table": { - 
"name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -2828,7 +2891,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -2839,7 +2902,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -2850,7 +2913,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -2861,7 +2924,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -2872,7 +2935,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -2883,7 +2946,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2894,7 +2957,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2905,10 +2968,10 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" }, "originalName": "c_enum" }, @@ -2916,10 +2979,10 @@ "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" }, "originalName": "c_set" } @@ -2927,8 +2990,8 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -3669,7 +3732,7 @@ "filename": "query.sql" }, { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS 
max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ diff --git a/examples/MySqlConnectorExample/request.message b/examples/MySqlConnectorExample/request.message index 7057b8c1c95a11fb164564ad8336661d52d8aa17..dc62a011d8b4b472dda060f2bad0ead74dfe9e43 100644 GIT binary patch literal 25740 zcmeHQOK%*<5za2j)v`%h@<_Jj@oPM?WO7XLWm%SJ%MzDd%Cz~g`0xXX!7O)2)Y{8k zYGx^m6a>UM7)cz!Kyt~!xdaFhAaM-Ch=Bk}4hazClH`~id`o^pkV7s`RllZZwr6%I zN{$qh5VZ5Cs_Cw-uKK#Vdxre8i)2QPuJtYdO3gg(UMsC{)Leh4+^Aowt_{)4L(Q!X zxa5~FyX&Pvr0dhul{TA~8=gvOKbcHom z-7Pm8UXhjE$d&!eC2zz0SI2d`;;wG4Ex5q%H|?>ex9JYpYi`~3N=>XuIE)Nu5XWh{kmOl)Hc`az;VHC&Y~8lHtXePwNdxS z)=Sq`s`VzYup+HsZS1k@cD>P@+pN{>u}h_z4@F|vZKS<%W1y|ft0k`tRZK0{Z+g}G zS}M`aDyX3eMvx$!sgaj?5;VC4OcCkMSb60wzfoUd&t4HaOAs5@g)H-%pq`Sq<~FB1 zXqk5!-rJz-y4yTGI1IuzeRs07vEh1Lmp7cl#26f^RQ=}AOru#kQV(>C&r0g@}PtTr08;mm@bWVPGe*R0mJpwWyW z(#@#dQawEb=N|4z@q{r#$}9eQsaAvM4Sym2Vhcyiyr4FoCq3nrb+=O8Tu;Z`V|Gh& zW0-W6(TY<%w9D*3ZJb1Hz=6Bwdg-}Nkv!Z;NJdv;SO7<9#Gjq}8S zN>IL)4G(JXgP+=o1oIDFcpfbFLLSqywUEdkc7&!nu zCG@YQ+76#Lh<3?+=Wt~~UpVO1oWwYTeGWTm)4dilE7$^7Z{d;;5XVG9>=p@6rn5KBhe6ZEf zRv44<9QH@wZ4MuZ;Wo`9T5jMmf0QNnn}^x$3Is5Wdff3pjm6q>!*(1)jFya~24zA) zYV{MCkQB@x%u76d_|QTeJv5b|0%r%VT*l}GCUD`B%y6(ycnZ!X?kQ+TKw*BTBk16( z)q2U>;)*$r3SX@@hx38LRf1LU{%{*2C-vjV36V{9l@KZQ-bevTfC@o}W4EDvdIqSD zBntZFYOS%VYFy9D`y~=HQ%}bR3ZX1YS5C($yE0{${W2Xh@0Unyek()38*xx=94Fao za`NBlOzpl)K7W=Pu`OBfwZ zw*;e!#-&THYjmzxYj4{lWIrg)hknqY-NM*EzS(TR*_v0{xV&X_7vy1X)?IH@J$sNG zq%6F?7%hz)aKT8)$Q3J97`Yf^$t%_US_zKr&of3IVt`w(xE1rS8FElL?LfCW#srWWRBEmkkzdhNyHLXqCI&RXZJnd#Z-C2Pd!hA|dXo!L3osJph) zYLmK7u?=0v9?)5)Ld|Vxn|2?&FGcs*#vbGh=QGeGsX85#J=QJ9v*ssRGOkqzQT0LQ z7bc4fR^bh*62jDR&YGW^S}ZPE=Zu~i81FE}_+}T3JL_9_gldk0Y91q9wd#7c8FENy 
zpnQbjJdw{D7Rf<3gU}pDrRwavS#olEZn3zqWKGX4%}b-{3mH_u_3HS{a&gh>J2znA zzkZ{4!gC?=atrDjbFaB&=MU;RYtAkH)+)}H(j;tEvp)M zic{LIq4o^!DXNu_pV%+`L}3dmcjx$uK`^!*_$;N-&0l0mp{+WI;y7ofUoKh`^W!tc z#fc(zf+vqX*>BvhsAdkF?<=Kd)Aj0ilGiv5N;*k;(SlbtSn-sge>$@e*gqn09PqR{ z)~m#xgfO#I3^0u;xnqmC8PHyOM`M1N zB`>tKxu6S9&slv#&f?7UL~*QI@tq;?xd)`Z`PdilsPObO7}^l&fTD)NPT0aE4!xW~ zzppmff3oDw-KobKpPOVbN7Wr#KGWU<5$%oFE7)o8dM!H7K=d3TU9_eN%IDo1R8R4h zs{GG0WJHa8bdX~8p~L~+e_(JCjedkhzXFVC!ZDA*01b*eAb|iI2ZddZQ-F%R5IdS_ zVV2gk9Sne!S{1~I#{5Z^e3s$H#l68G;u~?S*3%yZuK*|-hiR0E9WfGi%-BF7f=n4v zSs|=xy)+NDNV%-}Ir%&qVI#7v0GF!8G7eEVDqURx%OwKL<3!*o#iy@+%?o_G{Rm;2 z4igB?DB3bV%aT7vlr*AhX=E^@rV*wmUe-~Sax_diij-lsaIO)iAATm(GpeYHJrrEF zd=3_ZyKf-{o^3rJP*f|%A?WIpn;S4gp}2F7;fW*x zHji)hG0TG7IQl>KKir@Ldf_$bN!TNbq0zm}FBW;?p-^prQt9~UECk6ja;%Sn+oTe< z)$D{b1pe`mY`&OG;{M!V(+9m?3-^2`xZ0;*j)PB$8uc?j-sDe&d1Zy6<{ywZt5oc1Oz=+IG%Eqd$*x z7JxT!0{#&R!E|%=%0&D&=K(K}1uqbqlus`tU;{ZFOinOYzau%E%~=4iG#598-$o)# z2?*2JRu%y`%lR(jV(ip@8=>kH|F*h3z?sPBjB}bf|26{I-2XOPE8x7G-)CHiSq6|! zP_Tr+naSsk7xk+H+QUT2WNu;NG}XPa18`sK9F9DU~;DO`;DS@9Yq^qfLiE!ifyq50~+fd z)>y5}6WYjv#$r6%&{*uq`DXrr@r`zi7E;UAEPfb0U~vy*Vc7RAC1e!z3xCXi_mJOY z@8|$P+eG?pZYlCSDLjP3!R-nF561+6LaAB4Y<7>88yj1fyvF)oBv2Qnt?osFTN(+b zArjmouQvt3fZ~l^I&&Emb{(90cNLrp5D<17RPM4!kizl*tUE|Iab(XCKB9U!6o?)U z1!Tj^0g{Ik3e@HLNW3l3J730#m!ylPhj>LgaUSR?6|U}gKf9w*1}Yw)psR*M5;1hn8k){=quis92jYI`(*)9v{f zjAqXZyl#8G06N>JRM^~}Q@~}W4eJ;ylgSm2X3{}oSm1FnhBny=b`sqrukPxWyKGwE)Pg$_grs87R8ni7kLY4EZ z%EE-xi-IZ(CakhJhLUx&l0|p+Bv%$nh74$R`0Kck*(jyi?UPPy%(fv=Ko2MY9?EwF zB}+@%8H6QAA zrQGC0dNOM2qk%;vgPlAQQjQ7ZTug3jg32yu6w?4LNEtqQ+%<6eP1? 
zpr%dF=rEqP>QQWGKc~fY$-Dy3*)Jzzxnvt z2x6&7NG~V>pFwpv#A+`lg2`BQgc-GGkHKNA1W^p8uSf)Y*>9x5UCE+i@D@}qz*>3< zX)qQn@sjvTFQq-MQv8JZYeGKjXrHyD9)jn{zN{f7e;jos%5jT3;rF5=O1fu>jl5O2 z5=}|og*6k75jPe^9b@`TY|5?QIHC+2CqXXq zhw^I|Aip+&@htkVPg{Zvwg8Cyy<19T#(cs;uU!UiO$YoG7y!c;<99+Ji&xaVvvN#Ff-Q0X2y0V4v-KP z1ul__iOebPOqk4BQIRs?MFn4kIYm1EDePfKRTfNQG7v?=+!Zqu4Q7*h8`Ubz_jBYU z4PsMi?zB#!PA{?A(oWHW?e=;HZgj!T1j0dm#e0Vm4T6~p)zZzsb&@L*eqsm!q{#^I zNIENqIfc__ZUV2VuAa?IV5d}(sroeewlg`^pjb0sxOXD(?%Mtv_Uf?EReI<4?_>-- zIV?w3eQ~O{Glsl(FeQK$R^6Ds)G;>6a_H$qM7*?`5lbf^!7~R_0_j9Sb#&fXu{ZO> z9Qi@Eea5>LcJMrNV0-+6=+hFVfa1lmVuzes6i;0+Qd2P)&DzC(WXbO{@LDZ=0VVANv*WM#(cL(7 zX*_>pKkG2;7W6Qjm65mkxk@4j*^)?JQ9Bl249iES6ASZ;i|l1Gnt7p?yOA-@y_;M54L#Xtk0l?kZk)CET^$)5eRgoz z`FZDc`MHEiuL>Vdc-8grrQ`4pe|USp{!kEHxcL-fl4!FL5x67Yq2R)Ch`a=&a#eg2zLgO!c=}H| Sv@0`w(joj|4*Kbb8~+9TAi$IW literal 23825 zcmeHP&2QYs74PcHtC3aPBiV77FNTh7iBY7LVmtOGwxg9+t`dDX(mF|7q{3>KmN!}L z%H+ziWEe<$YSE$v8WiXurykp;KoB&y_E_Z7=GY!`?4MAyK!75BZ@xI>l3ZGj8wZeJ zc!%VBk27!Hy!p+0!y*6dC7CJOySd}ttQ)7C+vUxzy5mh$nvLtVjR|&pqP4RHx4g;? 
zXR|zxbR*Gp*cx!RpR$8q`iB0tfS0ZWw!!NOfA=IxwXbdD$%VP zsG$W$kRY6^QPg=BG+6?skaRm=S$o@SHrDvN*QCx8#0GVt>iia{r|fPxtvMHZ=G~_I z4(Ph!w9b#`K-jkD%$B#d99QV_u02fX_(ZklwI&vtm2!P$8_dI{{~e}%TifgPTE%*Q zNEVGR`pKX%z>J*rjYpp$eU-KKX0x8ltOOse_ZtUVwZ;x~5-{5&}X#AS3`<$BsTFg*%yl&ePcig-qudE+=}Ik%Jg8_Zi$ z(R1XWH17^v2MxmQ8|f%hQ3noC5u;`Gsy7=O9q6H6l8Vbm-;kRGUvUw4sX#xTEM1uu@SUwFai@zpy{ z(8^+n*Fl#^N*NOh(%Y86grwXs#g1JO<~+oFQ_8+m9w?t`I3e3E;?KsPI=o z{h}G5If{s0zzGA#%6h%Iu4_D+R}3s9rju-rOT`8XVJsK@v6$|CYDhA6R#_ZC>6pc6 zLt^8-41tMPAWM3Rbk(ZIGkSadr|bUYa*FgIpo^uHbGc=3;^P+xV0^G(Rz&)v5;$xkSOoa%Wwnj zLc-8US`mN^ZH~)yq(}KF`oBcyFgsb*jc^ z8FE;=>_JH|rUryC>{^&^+DH(h$rp_ZRjwZJCjjz{oN zqpj)2&$`I@)#5^NrexNtV%!D_ab1Tb>CU2K#zVV&nN|8M@FPfLlh55z#k~vBH zVQIltXY}No4ac(EtuSgB?1HUYV^OxKQ46ctWdjCmr}Kc=MG`xg%|Mr=8fIK}zF#fl zjE}m=bfh|nt`D-jGFx0R3*TibAzU4o&E>hd)nduKOa~SK=5xh(MlUS48aoe#YEFP^ zo+7>V+Ged4a6xFG0#Mt}WV@(I`e2yBWE@AO>+C08zFR|cvd=@c&qQ^5 z1ZTjs>@(SJnrkz!1U57?O#ubpXpMhl$a9gpPy;KdhfzALD`CZHZM%*6sxiQnQ7?Nf z*0ju?o7VM}H4Qw)xyYfRj||}{YSnIczne;7~4JgEUnXx-*=HhM|BX* zaW2fiS~O>trx%K=GesN(r%#?9rN=bYEP?aAT5h!*xA8D}jq{+SGvpvz@Y)uyo;J;& z&#d_NkC+=*EbWf<7O`ewnkf|n6N|1Nm{{n+S*PxVJ;HrC^b2Gt;=S~+cu~FXyJ>Vk zdJq31Lvq)y&Q3#}%RGQc(7og22z3=z$E*2Ob7{F`E?rw#po7=8a6KTU_Oo&9HsAGx zV++khm^f}I2IqO>FIeevYnw#X%Ktn=CiR(*ElbQ1lsG2F4=g9bvmfEJUjf!I!8wn^fCvey8sQj6uW+DV^-F*>o0kO3uK*|-7iNrz zmpBP8b2d1lZDQZkbhOYYCo6+qws2^`KUR9gNrJ05HPMNb?T5 z7roCQ7Wby^@MMqxyT>y}xMe})8vP&tA0Ds;z4RLFBIpt2(&z~Hi$xK5DAbyuRJK04 z2$SRmwbn<$9YP7adUV2`0RQ-Gst^Qs@%BYn9bLG`V8pJ9UD$Kk9$HY`b`C>>ea6AM?4`g8=25Woxti;>v>qw!l%Qr}HM93FvF@citi5-I z^%ZmjSOZ$0>fThLDc>5jUNq*&TsrVU+%> zA@ZMYq7OtZqhOK|V!|I!ijHYzNRY})kp`8QGL0#(RfbaDfIx{>#3ZD=gwQ2EG{t*F z>Te8Q4@LYDsXZ)S$K)L=V5*VPu)LdJ@-uKsBIeBG%UA+UUFZoic~gWO<)utu$!k9~ zCGX(UO_pU^Nxd8MM)F})23rGY>+ z2-mEB%KWH8je1sHOP%iuTKf_>$b2x3-kI6Yx&wbV=ND!LX_i#{g4})u9OhhH(mh5J z-&70m}b2SAP43 zf*2W~d``Oy%I78f&Fmps>a@+Eqr8Z13ZUM@pk5fq;hhCB(qJ)*lsNM7*M2#BguWEH zHKBt5xtqbhnx%9halb+bN#U&y(54}7GQyVw){7AVrYdOAR7DZI$uRdO()nwPxHw57 
zCClP?BA1J0Xx&k0UF_b6n0l#%E0Y)ChSnY9J%-Ox)J#Sfp~qN_UdcYfTPG*06f#5( zm(>PZCpn=m$9Y}CJ1An>1$9A6v>yDme~^8YelN|giVpH(_GN(oG{Aqt+r-X`7%?i# zj;PsNs|<1R9bBS_KMJ35zr~(}HbyCEv=7U4FgHq%hGAKu+^XC#`tz0M*3Nae zxw((ea69EbKJ&CBu8+^A0iWF`R=ETajiLBGLM;W=_6!jZ?g=910jBL0;_c(JN?^}4 zhVB@^+s9{Lewm3wd;9ndzL-dX8ztU*xbay6Abal+`Ls)9utYozU=E~^>KF@yqKH6X zQAQK6?8m?0f`zx#=6ifBV_ssLt3eD6eL?FTiH)M1RigIy- z3|dqRpI}4_ekIJuGQxHF8~zLg@3R0F`~uwNQDQxyD7U`hmX<~Qs)mQ1_?DJ2E#ee} z!x51tYbqFJif4fJoaiRbBRVSSyA50e3Nq_bTr?^4#+WFQH60s43JHjf99a_)QKJxZ zAX;R-5EChiW`f3oVPYiKIv*V!3h_lmg{-rY(UA~iTU3Pih>sFN^pOE1*bAtFNAdKW zgv<9)Ogud(L6E^7R~h$#&{jN$B;rbIkLrXtBceIhLZ?Vhtc-{V4zv{(syN_ve;kq^ zCv`z-or#Rt#4t%DV9h2*X=1p;(HY)1eq<)Z78#M@qft7ngm%V6VW8%L7)pYKgqfXd zIvjNg)gLs?q9mYc{N**5jx^!DF%cI2&Tw?4{hfML#ac-dQHhZeGgE_a5#f-O->=B% z37DFSoP=JP5HZnaWaGC(xQM&zo*Lmm?m7jm{1mvTDyFQrI6m&^M+eHFOUQdr{t*AXfauV(YU zR&Ja7#j|h%i|udKFaWu-vV83-?EV(?Ut4zpMv+|wMcGZfh(IO{)Dms@26l@?Vuu4r zY^Ko?8TltkH;k#kX113ze!Ky8oPZm^zDxR07RNr)a91w|V1g z*6tBZF_``U0p31X3r(gH;q8O9?M()}7%bigYvGklVyf%@!CHYBp$$qJ2B1`KPtt+_ z?VFQ6?IAzPL~KrypA54dfS$ig3BOmC;0kHb zFLMR<%go?}82-5WNwsk%@{4SYu_6o3llW_2f$cL)5LRDbB%~08EGP(nA#>f$bGvwj z^@Z=LJIR2Rhor`RFtQAJmL9$jM(HhuF#RKK+y|q0AywQf($-il(y>~&kOE3<8vafl zHfKefBGRZapMV>G?jb+SwC7W08~O{%UekoV5|12ZXYky(f{ppRj{ zvI_q)M}sQ`FhpD`NHfB+GIV*NM?oc_ur~?9k4g|eD=b?Ni@q4;SYoOfb|@BO#<%Lu zB!L-8r4!*H3t8oa0~ul*2M>KJeIy8o?O73N;f^Aq+PZ_#deiu85BZz;GJq}BVnPP2 zKgC)O+bib<*>-uY+M?g!p9JvPO)~KQ4KH=??={j`jz=0x`jy+hJz+P(#Ipx`S|ukU zQBAPlsS$uTvw=a~4s+h`BY%j3Irp*Kw&u7X>nfLeTlrk}Ht z%*~P!c-{)`lH_vjR{{N837@`_ECyVV(jVd#bz#2-@MkgZSe9g=A_|QfMMNdL!!7~@ znq~nMc%pmeWO0Cmm?%I(_-nrbJBnBRfHvMsFHFU^nMf5CXM$gY-NvlR(jL-_72#_p zRBRB$^3=CYZYB|!O|3+9t1y1rM?Q=|Y&y-uHYgs89u#fZZhv&(?#lK3(IJPI23ZQx zqv7v8`q+ywkK}LSx%9IrOC2phk{zR zMMu%PErv1l1Fwg)eLPwuZ2Rd!_Wtn5#2k0`4I3WIDv$vusEEf9fJ1 zWZ)*=#S>TIAYnK{IQl&3gfoZnWAKzmMTd_k_79Kd zrxph)BF{+P`?=ECZO|y#jR5Tm9x{Dj=SmB?;!Nhfh*q80egL!(j*H(Cthe==x8T6; XV8zp)E8VHiaISRlbZ~b5^xgjf-lf}I diff --git 
a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index fdcce71b..1adb9108 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -6,7 +6,7 @@ namespace MySqlConnectorLegacyExampleGen using System.Linq; using System.Text.Json; - public class MysqlType + public class MysqlNumericType { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -23,6 +23,9 @@ public class MysqlType public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } + }; + public class MysqlStringType + { public string CChar { get; set; } public string CNchar { get; set; } public string CNationalChar { get; set; } @@ -33,8 +36,8 @@ public class MysqlType public string CLongtext { get; set; } public JsonElement? CJson { get; set; } public JsonElement? CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; public class MysqlDatetimeType { @@ -74,7 +77,7 @@ public class ExtendedBio public ExtendedBiosBioType? 
BioType { get; set; } public HashSet AuthorType { get; set; } }; - public enum MysqlTypesCEnum + public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value Small = 1, @@ -82,27 +85,27 @@ public enum MysqlTypesCEnum Big = 3 } - public static class MysqlTypesCEnumExtensions + public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCEnum.Invalid, - ["small"] = MysqlTypesCEnum.Small, - ["medium"] = MysqlTypesCEnum.Medium, - ["big"] = MysqlTypesCEnum.Big + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - public static MysqlTypesCEnum ToMysqlTypesCEnum(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCEnumSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum MysqlTypesCSet + public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value Tea = 1, @@ -110,23 +113,23 @@ public enum MysqlTypesCSet Milk = 3 } - public static class MysqlTypesCSetExtensions + public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlTypesCSet.Invalid, - ["tea"] = MysqlTypesCSet.Tea, - ["coffee"] = MysqlTypesCSet.Coffee, - ["milk"] = MysqlTypesCSet.Milk + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = 
MysqlStringTypesCSet.Milk }; - public static MysqlTypesCSet ToMysqlTypesCSet(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static HashSet ToMysqlTypesCSetSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 60c40449..87846138 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -42,8 +42,8 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlTypesSql = " INSERT INTO mysql_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision, @c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public class InsertMysqlTypesArgs + private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, 
@c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public class InsertMysqlNumericTypesArgs { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -60,27 +60,15 @@ public class InsertMysqlTypesArgs public double? CFloat { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlTypesSql, connection)) + using (var command = new MySqlCommand(InsertMysqlNumericTypesSql, connection)) { command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); @@ -97,18 +85,6 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -120,7 +96,7 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlTypesSql; + command.CommandText = InsertMysqlNumericTypesSql; command.Transaction = this.Transaction; command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); @@ -137,23 +113,11 @@ public async Task InsertMysqlTypes(InsertMysqlTypesArgs args) command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public class InsertMysqlTypesBatchArgs + public class InsertMysqlNumericTypesBatchArgs { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -170,20 +134,8 @@ public class InsertMysqlTypesBatchArgs public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task InsertMysqlTypesBatch(List args) + public async Task InsertMysqlNumericTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; var config = new CsvConfiguration(CultureInfo.CurrentCulture) @@ -206,15 +158,11 @@ public async Task InsertMysqlTypesBatch(List args) csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlTypesCSetCsvConverter()); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -224,7 +172,7 @@ public async Task InsertMysqlTypesBatch(List args) var loader = new MySqlBulkLoader(connection) { Local = true, - TableName = "mysql_types", + TableName = "mysql_numeric_types", FileName = "input.csv", FieldTerminator = ",", FieldQuotationCharacter = '"', @@ -232,14 +180,14 @@ public async Task InsertMysqlTypesBatch(List args) NumberOfLinesToSkip = 1, LineTerminator = "\n" }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision", "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", 
"c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1"; - public class GetMysqlTypesRow + private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public class GetMysqlNumericTypesRow { public bool? CBool { get; set; } public bool? CBoolean { get; set; } @@ -256,33 +204,21 @@ public class GetMysqlTypesRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task GetMysqlTypes() + public async Task GetMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesSql, connection)) + using (var command = new MySqlCommand(GetMysqlNumericTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesRow + return new GetMysqlNumericTypesRow { CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), @@ -298,19 +234,7 @@ public async Task GetMysqlTypes() CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), - CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), - CText = reader.IsDBNull(21) ? null : reader.GetString(21), - CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CJson = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), - CEnum = reader.IsDBNull(25) ? (MysqlTypesCEnum? )null : reader.GetString(25).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(14) ? (double? 
)null : reader.GetDouble(14) }; } } @@ -327,13 +251,13 @@ public async Task GetMysqlTypes() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesSql; + command.CommandText = GetMysqlNumericTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesRow + return new GetMysqlNumericTypesRow { CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), @@ -349,19 +273,7 @@ public async Task GetMysqlTypes() CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CNchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNationalChar = reader.IsDBNull(17) ? null : reader.GetString(17), - CVarchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CTinytext = reader.IsDBNull(19) ? null : reader.GetString(19), - CMediumtext = reader.IsDBNull(20) ? null : reader.GetString(20), - CText = reader.IsDBNull(21) ? null : reader.GetString(21), - CLongtext = reader.IsDBNull(22) ? null : reader.GetString(22), - CJson = reader.IsDBNull(23) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(23)), - CJsonStringOverride = reader.IsDBNull(24) ? null : reader.GetString(24), - CEnum = reader.IsDBNull(25) ? (MysqlTypesCEnum? )null : reader.GetString(25).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(26) ? null : reader.GetString(26).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(14) ? (double? 
)null : reader.GetDouble(14) }; } } @@ -370,8 +282,8 @@ public async Task GetMysqlTypes() return null; } - private const string GetMysqlTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; - public class GetMysqlTypesCntRow + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + public class GetMysqlNumericTypesCntRow { public long Cnt { get; set; } public bool? CBool { get; set; } @@ -389,33 +301,21 @@ public class GetMysqlTypesCntRow public decimal? CFixed { get; set; } public double? CDouble { get; set; } public double? CDoublePrecision { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } }; - public async Task GetMysqlTypesCnt() + public async Task GetMysqlNumericTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlTypesCntSql, connection)) + using (var command = new MySqlCommand(GetMysqlNumericTypesCntSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesCntRow + return new GetMysqlNumericTypesCntRow { Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), @@ -432,19 +332,7 @@ public async Task GetMysqlTypesCnt() CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CChar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), - CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), - CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), - CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), - CText = reader.IsDBNull(22) ? null : reader.GetString(22), - CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), - CJson = reader.IsDBNull(24) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(24)), - CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), - CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(15) ? (double? 
)null : reader.GetDouble(15) }; } } @@ -461,13 +349,13 @@ public async Task GetMysqlTypesCnt() using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlTypesCntSql; + command.CommandText = GetMysqlNumericTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlTypesCntRow + return new GetMysqlNumericTypesCntRow { Cnt = reader.GetInt64(0), CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), @@ -484,19 +372,340 @@ public async Task GetMysqlTypesCnt() CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15), - CChar = reader.IsDBNull(16) ? null : reader.GetString(16), - CNchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CNationalChar = reader.IsDBNull(18) ? null : reader.GetString(18), - CVarchar = reader.IsDBNull(19) ? null : reader.GetString(19), - CTinytext = reader.IsDBNull(20) ? null : reader.GetString(20), - CMediumtext = reader.IsDBNull(21) ? null : reader.GetString(21), - CText = reader.IsDBNull(22) ? null : reader.GetString(22), - CLongtext = reader.IsDBNull(23) ? null : reader.GetString(23), - CJson = reader.IsDBNull(24) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(24)), - CJsonStringOverride = reader.IsDBNull(25) ? null : reader.GetString(25), - CEnum = reader.IsDBNull(26) ? (MysqlTypesCEnum? )null : reader.GetString(26).ToMysqlTypesCEnum(), - CSet = reader.IsDBNull(27) ? null : reader.GetString(27).ToMysqlTypesCSetSet() + CDoublePrecision = reader.IsDBNull(15) ? (double? 
)null : reader.GetDouble(15) + }; + } + } + } + + return null; + } + + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(TruncateMysqlNumericTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateMysqlNumericTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public class InsertMysqlStringTypesArgs + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? 
CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(InsertMysqlStringTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlStringTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_char", args.CChar ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public class InsertMysqlStringTypesBatchArgs + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? 
CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task InsertMysqlStringTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; + public class 
GetMysqlStringTypesRow + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task GetMysqlStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlStringTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? 
null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlStringTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? 
null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } + } + + return null; + } + + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public class GetMysqlStringTypesCntRow + { + public long Cnt { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task GetMysqlStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlStringTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesCntRow + { + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? 
null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? null : reader.GetString(12).ToMysqlStringTypesCSetSet() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetMysqlStringTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesCntRow + { + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() }; } } @@ -505,15 +714,15 @@ public async Task GetMysqlTypesCnt() return null; } - private const string TruncateMysqlTypesSql = "TRUNCATE TABLE mysql_types"; - public async Task TruncateMysqlTypes() + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlTypesSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlStringTypesSql, connection)) { await command.ExecuteNonQueryAsync(); } @@ -526,7 +735,7 @@ public async Task TruncateMysqlTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlTypesSql; + command.CommandText = TruncateMysqlStringTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } @@ -1076,7 +1285,7 @@ public async Task TruncateMysqlBinaryTypes() } } - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_types CROSS JOIN mysql_datetime_types"; + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; public class GetMysqlFunctionsRow { public int? 
MaxInt { get; set; } diff --git a/examples/MySqlConnectorLegacyExample/Utils.cs b/examples/MySqlConnectorLegacyExample/Utils.cs index f43f4088..93f3b167 100644 --- a/examples/MySqlConnectorLegacyExample/Utils.cs +++ b/examples/MySqlConnectorLegacyExample/Utils.cs @@ -15,13 +15,13 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - public class MysqlTypesCSetCsvConverter : DefaultTypeConverter + public class MysqlStringTypesCSetCsvConverter : DefaultTypeConverter { public override string ConvertToString(object value, IWriterRow row, MemberMapData memberMapData) { if (value == null) return @"\N"; - if (value is HashSet setVal) + if (value is HashSet setVal) return string.Join(",", setVal); return base.ConvertToString(value, row, memberMapData); } diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index 867f5c46..42781545 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -27,14 +27,14 @@ "tables": [ { "rel": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "columns": [ { "name": "c_bool", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -44,7 +44,7 @@ "name": "c_boolean", "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -54,7 +54,7 @@ "name": "c_tinyint", "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -64,7 +64,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -74,7 +74,7 @@ "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" 
@@ -84,7 +84,7 @@ "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -94,7 +94,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -104,7 +104,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -114,7 +114,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -124,7 +124,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -134,7 +134,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -144,7 +144,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -154,7 +154,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -164,7 +164,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -174,17 +174,24 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" } - }, + } + ] + }, + { + "rel": { + "name": "mysql_string_types" + }, + "columns": [ { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -194,7 +201,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -204,7 +211,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" 
}, "type": { "name": "char" @@ -214,7 +221,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -224,7 +231,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -234,7 +241,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -244,7 +251,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -254,7 +261,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -264,7 +271,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -274,7 +281,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -284,20 +291,20 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" } }, { "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" } } ] @@ -528,7 +535,7 @@ ], "enums": [ { - "name": "mysql_types_c_enum", + "name": "mysql_string_types_c_enum", "vals": [ "small", "medium", @@ -536,7 +543,7 @@ ] }, { - "name": "mysql_types_c_set", + "name": "mysql_string_types_c_set", "vals": [ "tea", "coffee", @@ -622,8 +629,8 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, 
c_double_precision, \n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypes", + "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", "cmd": ":exec", "parameters": [ { @@ -633,7 +640,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -648,7 +655,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -663,7 +670,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -678,7 +685,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -693,7 +700,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -708,7 +715,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -723,7 +730,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -738,7 +745,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -753,7 +760,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": 
"mysql_numeric_types" }, "type": { "name": "decimal" @@ -768,7 +775,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -783,7 +790,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -798,7 +805,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -813,7 +820,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -828,7 +835,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -843,206 +850,26 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } - }, - { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - 
"type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } } ], "comments": [ - " Basic types " + " Numeric types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_types" + "name": "mysql_numeric_types" } }, { - "text": "INSERT INTO mysql_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, \n c_char,\n c_nchar,\n 
c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlTypesBatch", + "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", "cmd": ":copyfrom", "parameters": [ { @@ -1052,7 +879,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1067,7 +894,7 @@ "length": 1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1082,7 +909,7 @@ "length": 3, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "tinyint" @@ -1097,7 +924,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "smallint" @@ -1112,7 +939,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "mediumint" @@ -1127,7 +954,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1142,7 +969,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1157,7 +984,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1172,7 +999,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, 
"type": { "name": "float" @@ -1187,7 +1014,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1202,7 +1029,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1217,7 +1044,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1232,7 +1059,7 @@ "length": 10, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1247,7 +1074,7 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1262,265 +1089,85 @@ "length": -1, "table": { "schema": "public", - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } + }, + { + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" }, { - "number": 16, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" }, { - "number": 17, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - 
"name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" }, { - "number": 18, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" }, { - "number": 19, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 20, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 21, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 22, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 23, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 24, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 25, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": 
{ - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 26, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 27, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_types" - }, - "type": { - "name": "mysql_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_types" - } - }, - { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1", - "name": "GetMysqlTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": 
"c_mediumint" }, { "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1531,7 +1178,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "int" @@ -1542,7 +1189,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "bigint" @@ -1553,7 +1200,7 @@ "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "float" @@ -1564,7 +1211,7 @@ "name": "c_decimal", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1575,7 +1222,7 @@ "name": "c_dec", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1586,7 +1233,7 @@ "name": "c_numeric", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1597,7 +1244,7 @@ "name": "c_fixed", "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "decimal" @@ -1608,7 +1255,7 @@ "name": "c_double", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" @@ -1619,332 +1266,748 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { "name": "double" }, "originalName": "c_double_precision" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, \n 
c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_char", + "name": "cnt", + "notNull": true, "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_char" + "originalName": "c_bool" }, { - "name": "c_nchar", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_nchar" + "originalName": "c_boolean" }, { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" }, { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" }, { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": "c_tinytext" + "originalName": "c_mediumint" }, { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" }, { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "text" + "name": "int" }, - 
"originalName": "c_text" + "originalName": "c_integer" }, { - "name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + "name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" }, { - "name": "c_json", + "name": "c_float", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "float" }, - "originalName": "c_json" + "originalName": "c_float" }, { - "name": "c_json_string_override", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json_string_override" + "originalName": "c_numeric" }, { - "name": "c_enum", - "length": 6, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "decimal" }, - "originalName": "c_enum" + "originalName": "c_decimal" }, { - "name": "c_set", - "length": 15, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mysql_types_c_set" + "name": "decimal" }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision,\n c_char,\n c_nchar,\n 
c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + "originalName": "c_dec" }, { - "name": "c_bool", - "length": 1, + "name": "c_fixed", + "length": 10, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "decimal" }, - "originalName": "c_bool" + "originalName": "c_fixed" }, { - "name": "c_boolean", - "length": 1, + "name": "c_double", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_boolean" + "originalName": "c_double" }, { - "name": "c_tinyint", - "length": 3, + "name": "c_double_precision", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinyint" + "name": "double" }, - "originalName": "c_tinyint" + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } }, { - "name": "c_smallint", + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": 
"public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": 
"public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + 
"length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + } + }, + { + "number": 7, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_string_types" + } + }, + { + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": 
"smallint" + "name": "char" }, - "originalName": "c_smallint" + "originalName": "c_char" }, { - "name": "c_mediumint", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumint" + "name": "char" }, - "originalName": "c_mediumint" + "originalName": "c_nchar" }, { - "name": "c_int", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "char" }, - "originalName": "c_int" + "originalName": "c_national_char" }, { - "name": "c_integer", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "int" + "name": "varchar" }, - "originalName": "c_integer" + "originalName": "c_varchar" }, { - "name": "c_bigint", + "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "tinytext" }, - "originalName": "c_bigint" + "originalName": "c_tinytext" }, { - "name": "c_float", + "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "float" + "name": "mediumtext" }, - "originalName": "c_float" + "originalName": "c_mediumtext" }, { - "name": "c_numeric", - "length": 10, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_numeric" + "originalName": "c_text" }, { - "name": "c_decimal", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "longtext" }, - "originalName": "c_decimal" + "originalName": "c_longtext" }, { - "name": "c_dec", - "length": 10, + "name": "c_json", + "length": -1, "table": { - "name": "mysql_types" + "name": 
"mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_dec" + "originalName": "c_json" }, { - "name": "c_fixed", - "length": 10, + "name": "c_json_string_override", + "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "decimal" + "name": "json" }, - "originalName": "c_fixed" + "originalName": "c_json_string_override" }, { - "name": "c_double", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_double" + "originalName": "c_enum" }, { - "name": "c_double_precision", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "double" + "name": "mysql_string_types_c_set" }, - "originalName": "c_double_precision" + "originalName": "c_set" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } }, { "name": "c_char", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -1955,7 +2018,7 @@ "name": "c_nchar", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "char" @@ -1966,7 +2029,7 @@ "name": "c_national_char", "length": -1, "table": { - "name": "mysql_types" + "name": 
"mysql_string_types" }, "type": { "name": "char" @@ -1977,7 +2040,7 @@ "name": "c_varchar", "length": 100, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "varchar" @@ -1988,7 +2051,7 @@ "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "tinytext" @@ -1999,7 +2062,7 @@ "name": "c_mediumtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "mediumtext" @@ -2010,7 +2073,7 @@ "name": "c_text", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "text" @@ -2021,7 +2084,7 @@ "name": "c_longtext", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "longtext" @@ -2032,7 +2095,7 @@ "name": "c_json", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2043,7 +2106,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { "name": "json" @@ -2054,10 +2117,10 @@ "name": "c_enum", "length": 6, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_enum" + "name": "mysql_string_types_c_enum" }, "originalName": "c_enum" }, @@ -2065,10 +2128,10 @@ "name": "c_set", "length": 15, "table": { - "name": "mysql_types" + "name": "mysql_string_types" }, "type": { - "name": "mysql_types_c_set" + "name": "mysql_string_types_c_set" }, "originalName": "c_set" } @@ -2076,8 +2139,8 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_types", - "name": "TruncateMysqlTypes", + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", "cmd": ":exec", "filename": "query.sql" }, @@ -2818,7 +2881,7 @@ "filename": "query.sql" }, { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) 
AS max_timestamp\nFROM mysql_types\nCROSS JOIN mysql_datetime_types", + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", "name": "GetMysqlFunctions", "cmd": ":one", "columns": [ diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index 70a15d96..75ce6bc3 100644 --- a/examples/MySqlConnectorLegacyExample/request.message +++ b/examples/MySqlConnectorLegacyExample/request.message @@ -2,46 +2,47 @@ ° 2mysql&examples/config/mysql/types/schema.sql(examples/config/mysql/authors/schema.sql"%examples/config/mysql/types/query.sql"'examples/config/mysql/authors/query.sqlb╔ $examples/MySqlConnectorLegacyExamplecsharpВ{"debugRequest":true,"generateCsproj":true,"namespaceName":"MySqlConnectorLegacyExampleGen","overrides":[{"column":"GetMysqlFunctions:max_int","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetMysqlFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetMysqlFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner¤public"Уpublic· -  mysql_types$ -c_bool0R  mysql_typesb tinyint' - c_boolean0R  mysql_typesb tinyint' - c_tinyint0R  mysql_typesb tinyint2 - -c_smallint0         R  mysql_typesb -smallint4 - c_mediumint0         R  mysql_typesb  mediumint( -c_int0         R  mysql_typesbint, - c_integer0         R  mysql_typesbint. 
-c_bigint0         R  mysql_typesbbigint, -c_float0         R  mysql_typesbfloat' +./dist/LocalRunnerЖpublic"Ьpublicп +mysql_numeric_types, +c_bool0Rmysql_numeric_typesb tinyint/ + c_boolean0Rmysql_numeric_typesb tinyint/ + c_tinyint0Rmysql_numeric_typesb tinyint: + +c_smallint0         Rmysql_numeric_typesb +smallint< + c_mediumint0         Rmysql_numeric_typesb  mediumint0 +c_int0         Rmysql_numeric_typesbint4 + c_integer0         Rmysql_numeric_typesbint6 +c_bigint0         Rmysql_numeric_typesbbigint4 +c_float0         Rmysql_numeric_typesbfloat/ c_decimal0 -R  mysql_typesb decimal# +Rmysql_numeric_typesb decimal+ c_dec0 -R  mysql_typesb decimal' +Rmysql_numeric_typesb decimal/ c_numeric0 -R  mysql_typesb decimal% +Rmysql_numeric_typesb decimal- c_fixed0 -R  mysql_typesb decimal. -c_double0         R  mysql_typesbdouble8 -c_double_precision0         R  mysql_typesbdouble* -c_char0         R  mysql_typesbchar+ -c_nchar0         R  mysql_typesbchar3 -c_national_char0         R  mysql_typesbchar' - c_varchar0dR  mysql_typesb varchar2 - -c_tinytext0         R  mysql_typesb -tinytext6 - c_mediumtext0         R  mysql_typesb  -mediumtext* -c_text0         R  mysql_typesbtext2 - -c_longtext0         R  mysql_typesb -longtext* -c_json0         R  mysql_typesbjson: -c_json_string_override0         R  mysql_typesbjson/ -c_enum0R  mysql_typesbmysql_types_c_enum- -c_set0R  mysql_typesbmysql_types_c_setШ +Rmysql_numeric_typesb decimal6 +c_double0         Rmysql_numeric_typesbdouble@ +c_double_precision0         Rmysql_numeric_typesbdouble├ +mysql_string_types1 +c_char0         Rmysql_string_typesbchar2 +c_nchar0         Rmysql_string_typesbchar: +c_national_char0         Rmysql_string_typesbchar. 
+ c_varchar0dRmysql_string_typesb varchar9 + +c_tinytext0         Rmysql_string_typesb +tinytext= + c_mediumtext0         Rmysql_string_typesb  +mediumtext1 +c_text0         Rmysql_string_typesbtext9 + +c_longtext0         Rmysql_string_typesb +longtext1 +c_json0         Rmysql_string_typesbjsonA +c_json_string_override0         Rmysql_string_typesbjson= +c_enum0Rmysql_string_typesbmysql_string_types_c_enum; +c_set0Rmysql_string_typesbmysql_string_types_c_setШ mysql_datetime_types3 c_year0         Rmysql_datetime_typesbyear3 c_date0         Rmysql_datetime_typesbdate2 @@ -73,9 +74,9 @@ c_longblob0 id0         Rbooksbbigint$ name0         Rbooksbtext+ author_id0         Rbooksbbigint) - description0         Rbooksbtext"( -mysql_types_c_enumsmallmediumbig"& -mysql_types_c_setteacoffeemilk"1 + description0         Rbooksbtext"/ +mysql_string_types_c_enumsmallmediumbig"- +mysql_string_types_c_setteacoffeemilk"1 bios_bio_type Autobiography BiographyMemoir". bios_author_typeAuthorEditor Translator"▄extended╧ @@ -83,9 +84,9 @@ Translator" author_name0dRextendedbiosb varchar% name0dRextendedbiosb varchar/ bio_type0 Rextendedbiosb bios_bio_type5 - author_type0Rextendedbiosbbios_author_typeХ -ў -INSERT INTO mysql_types + author_type0Rextendedbiosbbios_author_type  +╜ +INSERT INTO mysql_numeric_types ( c_bool, c_boolean, @@ -95,64 +96,39 @@ INSERT INTO mysql_types c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, - c_char, - c_nchar, - c_national_char, - c_varchar, - c_tinytext, - c_mediumtext, - c_text, - c_longtext, - c_json, - c_json_string_override, - c_enum, - c_set + c_decimal, + c_dec, + c_numeric, + c_fixed, + c_float, + c_double, + c_double_precision ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypes:exec*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz 
c_tinyint*JF +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlNumericTypes:exec*@< +c_bool0Rpublicmysql_numeric_typesb tinyintzc_bool*FB + c_boolean0Rpublicmysql_numeric_typesb tinyintz c_boolean*FB + c_tinyint0Rpublicmysql_numeric_typesb tinyintz c_tinyint*RN -c_smallint0         Rpublic mysql_typesb +c_smallint0         Rpublicmysql_numeric_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? - c_integer0         Rpublic mysql_typesbintz c_integer*D@ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*> : +c_smallint*UQ + c_mediumint0         Rpublicmysql_numeric_typesb  mediumintz c_mediumint*C? +c_int0         Rpublicmysql_numeric_typesbintzc_int*KG + c_integer0         Rpublicmysql_numeric_typesbintz c_integer*LH +c_bigint0         Rpublicmysql_numeric_typesbbigintzc_bigint*F B c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 -2 +Rpublicmysql_numeric_typesb decimalz c_decimal*> +: c_dec0 -Rpublic mysql_typesb decimalzc_dec*> : +Rpublicmysql_numeric_typesb decimalzc_dec*F B c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*: 6 +Rpublicmysql_numeric_typesb decimalz c_numeric*B > c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*A = -c_float0         Rpublic mysql_typesbfloatzc_float*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF - -c_tinytext0         Rpublic mysql_typesb -tinytextz -c_tinytext*PL - c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF - -c_longtext0         Rpublic mysql_typesb -longtextz -c_longtext*>: -c_json0         Rpublic 
mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? -c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set2 Basic types : query.sqlB  mysql_typesО -ЎINSERT INTO mysql_types +Rpublicmysql_numeric_typesb decimalzc_fixed*I E +c_float0         Rpublicmysql_numeric_typesbfloatzc_float*LH +c_double0         Rpublicmysql_numeric_typesbdoublezc_double*`\ +c_double_precision0         Rpublicmysql_numeric_typesbdoublezc_double_precision2 Numeric types : query.sqlBmysql_numeric_typesЎ +╝INSERT INTO mysql_numeric_types ( c_bool, c_boolean, @@ -162,106 +138,62 @@ c_longtext*>: c_int, c_integer, c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, - c_char, - c_nchar, - c_national_char, - c_varchar, - c_tinytext, - c_mediumtext, - c_text, - c_longtext, - c_json, - c_json_string_override, - c_enum, - c_set + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlTypesBatch :copyfrom*84 -c_bool0Rpublic mysql_typesb tinyintzc_bool*>: - c_boolean0Rpublic mysql_typesb tinyintz c_boolean*>: - c_tinyint0Rpublic mysql_typesb tinyintz c_tinyint*JF +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlNumericTypesBatch :copyfrom*@< +c_bool0Rpublicmysql_numeric_typesb tinyintzc_bool*FB + c_boolean0Rpublicmysql_numeric_typesb tinyintz c_boolean*FB + c_tinyint0Rpublicmysql_numeric_typesb tinyintz c_tinyint*RN -c_smallint0         Rpublic mysql_typesb +c_smallint0         Rpublicmysql_numeric_typesb smallintz -c_smallint*MI - c_mediumint0         Rpublic mysql_typesb  mediumintz c_mediumint*;7 -c_int0         Rpublic mysql_typesbintzc_int*C? 
- c_integer0         Rpublic mysql_typesbintz c_integer*D@ -c_bigint0         Rpublic mysql_typesbbigintzc_bigint*A = -c_float0         Rpublic mysql_typesbfloatzc_float*> -: +c_smallint*UQ + c_mediumint0         Rpublicmysql_numeric_typesb  mediumintz c_mediumint*C? +c_int0         Rpublicmysql_numeric_typesbintzc_int*KG + c_integer0         Rpublicmysql_numeric_typesbintz c_integer*LH +c_bigint0         Rpublicmysql_numeric_typesbbigintzc_bigint*I E +c_float0         Rpublicmysql_numeric_typesbfloatzc_float*F +B c_numeric0 -Rpublic mysql_typesb decimalz c_numeric*> : +Rpublicmysql_numeric_typesb decimalz c_numeric*F B c_decimal0 -Rpublic mysql_typesb decimalz c_decimal*6 2 +Rpublicmysql_numeric_typesb decimalz c_decimal*> : c_dec0 -Rpublic mysql_typesb decimalzc_dec*: 6 +Rpublicmysql_numeric_typesb decimalzc_dec*B > c_fixed0 -Rpublic mysql_typesb decimalzc_fixed*D@ -c_double0         Rpublic mysql_typesbdoublezc_double*XT -c_double_precision0         Rpublic mysql_typesbdoublezc_double_precision*>: -c_char0         Rpublic mysql_typesbcharzc_char*@< -c_nchar0         Rpublic mysql_typesbcharzc_nchar*PL -c_national_char0         Rpublic mysql_typesbcharzc_national_char*>: - c_varchar0dRpublic mysql_typesb varcharz c_varchar*JF - -c_tinytext0         Rpublic mysql_typesb -tinytextz -c_tinytext*PL - c_mediumtext0         Rpublic mysql_typesb  -mediumtextz c_mediumtext*>: -c_text0         Rpublic mysql_typesbtextzc_text*JF +Rpublicmysql_numeric_typesb decimalzc_fixed*LH +c_double0         Rpublicmysql_numeric_typesbdoublezc_double*`\ +c_double_precision0         Rpublicmysql_numeric_typesbdoublezc_double_precision: query.sqlBmysql_numeric_typesи +╞SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1GetMysqlNumericTypes:one"4 +c_bool0Rmysql_numeric_typesb tinyintzc_bool": + c_boolean0Rmysql_numeric_typesb tinyintz 
c_boolean": + c_tinyint0Rmysql_numeric_typesb tinyintz c_tinyint"F -c_longtext0         Rpublic mysql_typesb -longtextz -c_longtext*>: -c_json0         Rpublic mysql_typesbjsonzc_json*^Z -c_json_string_override0         Rpublic mysql_typesbjsonzc_json_string_override*C? -c_enum0Rpublic mysql_typesbmysql_types_c_enumzc_enum*@< -c_set0Rpublic mysql_typesbmysql_types_c_setzc_set: query.sqlB  mysql_typesА -╚SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_types LIMIT 1 GetMysqlTypes:one", -c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"2 - c_tinyint0R  mysql_typesb tinyintz c_tinyint"> - -c_smallint0         R  mysql_typesb +c_smallint0         Rmysql_numeric_typesb smallintz -c_smallint"A - c_mediumint0         R  mysql_typesb  mediumintz c_mediumint"/ -c_int0         R  mysql_typesbintzc_int"7 - c_integer0         R  mysql_typesbintz c_integer"8 -c_bigint0         R  mysql_typesbbigintzc_bigint"5 -c_float0         R  mysql_typesbfloatzc_float"2 +c_smallint"I + c_mediumint0         Rmysql_numeric_typesb  mediumintz c_mediumint"7 +c_int0         Rmysql_numeric_typesbintzc_int"? + c_integer0         Rmysql_numeric_typesbintz c_integer"@ +c_bigint0         Rmysql_numeric_typesbbigintzc_bigint"= +c_float0         Rmysql_numeric_typesbfloatzc_float": c_decimal0 -R  mysql_typesb decimalz c_decimal"* +Rmysql_numeric_typesb decimalz c_decimal"2 c_dec0 -R  mysql_typesb decimalzc_dec"2 +Rmysql_numeric_typesb decimalzc_dec": c_numeric0 -R  mysql_typesb decimalz c_numeric". 
+Rmysql_numeric_typesb decimalz c_numeric"6 c_fixed0 -R  mysql_typesb decimalzc_fixed"8 -c_double0         R  mysql_typesbdoublezc_double"L -c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_char0         R  mysql_typesbcharzc_char"4 -c_nchar0         R  mysql_typesbcharzc_nchar"D -c_national_char0         R  mysql_typesbcharzc_national_char"2 - c_varchar0dR  mysql_typesb varcharz c_varchar"> - -c_tinytext0         R  mysql_typesb -tinytextz -c_tinytext"D - c_mediumtext0         R  mysql_typesb  -mediumtextz c_mediumtext"2 -c_text0         R  mysql_typesbtextzc_text"> - -c_longtext0         R  mysql_typesb -longtextz -c_longtext"2 -c_json0         R  mysql_typesbjsonzc_json"R -c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 -c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlк -╧SELECT +Rmysql_numeric_typesb decimalzc_fixed"@ +c_double0         Rmysql_numeric_typesbdoublezc_double"T +c_double_precision0         Rmysql_numeric_typesbdoublezc_double_precision: query.sqlЖ +БSELECT COUNT(*) AS cnt, c_bool, c_boolean, @@ -277,7 +209,147 @@ c_longtext"2 c_dec, c_fixed, c_double, - c_double_precision, + c_double_precision +FROM mysql_numeric_types +GROUP BY + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision +LIMIT 1GetMysqlNumericTypesCnt:one" +cnt0         @bbigint"4 +c_bool0Rmysql_numeric_typesb tinyintzc_bool": + c_boolean0Rmysql_numeric_typesb tinyintz c_boolean": + c_tinyint0Rmysql_numeric_typesb tinyintz c_tinyint"F + +c_smallint0         Rmysql_numeric_typesb +smallintz +c_smallint"I + c_mediumint0         Rmysql_numeric_typesb  mediumintz c_mediumint"7 +c_int0         Rmysql_numeric_typesbintzc_int"? 
+ c_integer0         Rmysql_numeric_typesbintz c_integer"@ +c_bigint0         Rmysql_numeric_typesbbigintzc_bigint"= +c_float0         Rmysql_numeric_typesbfloatzc_float": + c_numeric0 +Rmysql_numeric_typesb decimalz c_numeric": + c_decimal0 +Rmysql_numeric_typesb decimalz c_decimal"2 +c_dec0 +Rmysql_numeric_typesb decimalzc_dec"6 +c_fixed0 +Rmysql_numeric_typesb decimalzc_fixed"@ +c_double0         Rmysql_numeric_typesbdoublezc_double"T +c_double_precision0         Rmysql_numeric_typesbdoublezc_double_precision: query.sqlQ +"TRUNCATE TABLE mysql_numeric_typesTruncateMysqlNumericTypes:exec: query.sqlе + +Л +INSERT INTO mysql_string_types +( + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)InsertMysqlStringTypes:exec*EA +c_char0         Rpublicmysql_string_typesbcharzc_char*GC +c_nchar0         Rpublicmysql_string_typesbcharzc_nchar*WS +c_national_char0         Rpublicmysql_string_typesbcharzc_national_char*EA + c_varchar0dRpublicmysql_string_typesb varcharz c_varchar*QM + +c_tinytext0         Rpublicmysql_string_typesb +tinytextz +c_tinytext*WS + c_mediumtext0         Rpublicmysql_string_typesb  +mediumtextz c_mediumtext*EA +c_text0         Rpublicmysql_string_typesbtextzc_text*QM + +c_longtext0         Rpublicmysql_string_typesb +longtextz +c_longtext*E A +c_json0         Rpublicmysql_string_typesbjsonzc_json*e +a +c_json_string_override0         Rpublicmysql_string_typesbjsonzc_json_string_override*Q M +c_enum0Rpublicmysql_string_typesbmysql_string_types_c_enumzc_enum*N J +c_set0Rpublicmysql_string_typesbmysql_string_types_c_setzc_set2 String types : query.sqlBmysql_string_typesЭ + +КINSERT INTO mysql_string_types +( + c_char, + c_nchar, + c_national_char, + c_varchar, + c_tinytext, + c_mediumtext, + c_text, + c_longtext, + c_json, + c_json_string_override, + c_enum, + c_set +) +VALUES (?, ?, ?, ?, ?, ?, ?, 
?, ?, ?, ?, ?)InsertMysqlStringTypesBatch :copyfrom*EA +c_char0         Rpublicmysql_string_typesbcharzc_char*GC +c_nchar0         Rpublicmysql_string_typesbcharzc_nchar*WS +c_national_char0         Rpublicmysql_string_typesbcharzc_national_char*EA + c_varchar0dRpublicmysql_string_typesb varcharz c_varchar*QM + +c_tinytext0         Rpublicmysql_string_typesb +tinytextz +c_tinytext*WS + c_mediumtext0         Rpublicmysql_string_typesb  +mediumtextz c_mediumtext*EA +c_text0         Rpublicmysql_string_typesbtextzc_text*QM + +c_longtext0         Rpublicmysql_string_typesb +longtextz +c_longtext*E A +c_json0         Rpublicmysql_string_typesbjsonzc_json*e +a +c_json_string_override0         Rpublicmysql_string_typesbjsonzc_json_string_override*Q M +c_enum0Rpublicmysql_string_typesbmysql_string_types_c_enumzc_enum*N J +c_set0Rpublicmysql_string_typesbmysql_string_types_c_setzc_set: query.sqlBmysql_string_typesП +пSELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1GetMysqlStringTypes:one"9 +c_char0         Rmysql_string_typesbcharzc_char"; +c_nchar0         Rmysql_string_typesbcharzc_nchar"K +c_national_char0         Rmysql_string_typesbcharzc_national_char"9 + c_varchar0dRmysql_string_typesb varcharz c_varchar"E + +c_tinytext0         Rmysql_string_typesb +tinytextz +c_tinytext"K + c_mediumtext0         Rmysql_string_typesb  +mediumtextz c_mediumtext"9 +c_text0         Rmysql_string_typesbtextzc_text"E + +c_longtext0         Rmysql_string_typesb +longtextz +c_longtext"9 +c_json0         Rmysql_string_typesbjsonzc_json"Y +c_json_string_override0         Rmysql_string_typesbjsonzc_json_string_override"E +c_enum0Rmysql_string_typesbmysql_string_types_c_enumzc_enum"B +c_set0Rmysql_string_typesbmysql_string_types_c_setzc_set: query.sql╣ + +╢SELECT + COUNT(*) AS cnt, c_char, c_nchar, c_national_char, @@ -290,17 +362,8 @@ c_longtext"2 
c_json_string_override, c_enum, c_set -FROM mysql_types +FROM mysql_string_types GROUP BY - c_bool, - c_boolean, - c_tinyint, - c_smallint, - c_mediumint, - c_int, - c_integer, - c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, @@ -313,50 +376,28 @@ GROUP BY c_json_string_override, c_enum, c_set -LIMIT 1GetMysqlTypesCnt:one" -cnt0         @bbigint", -c_bool0R  mysql_typesb tinyintzc_bool"2 - c_boolean0R  mysql_typesb tinyintz c_boolean"2 - c_tinyint0R  mysql_typesb tinyintz c_tinyint"> +LIMIT 1GetMysqlStringTypesCnt:one" +cnt0         @bbigint"9 +c_char0         Rmysql_string_typesbcharzc_char"; +c_nchar0         Rmysql_string_typesbcharzc_nchar"K +c_national_char0         Rmysql_string_typesbcharzc_national_char"9 + c_varchar0dRmysql_string_typesb varcharz c_varchar"E -c_smallint0         R  mysql_typesb -smallintz -c_smallint"A - c_mediumint0         R  mysql_typesb  mediumintz c_mediumint"/ -c_int0         R  mysql_typesbintzc_int"7 - c_integer0         R  mysql_typesbintz c_integer"8 -c_bigint0         R  mysql_typesbbigintzc_bigint"5 -c_float0         R  mysql_typesbfloatzc_float"2 - c_numeric0 -R  mysql_typesb decimalz c_numeric"2 - c_decimal0 -R  mysql_typesb decimalz c_decimal"* -c_dec0 -R  mysql_typesb decimalzc_dec". 
-c_fixed0 -R  mysql_typesb decimalzc_fixed"8 -c_double0         R  mysql_typesbdoublezc_double"L -c_double_precision0         R  mysql_typesbdoublezc_double_precision"2 -c_char0         R  mysql_typesbcharzc_char"4 -c_nchar0         R  mysql_typesbcharzc_nchar"D -c_national_char0         R  mysql_typesbcharzc_national_char"2 - c_varchar0dR  mysql_typesb varcharz c_varchar"> - -c_tinytext0         R  mysql_typesb +c_tinytext0         Rmysql_string_typesb tinytextz -c_tinytext"D - c_mediumtext0         R  mysql_typesb  -mediumtextz c_mediumtext"2 -c_text0         R  mysql_typesbtextzc_text"> +c_tinytext"K + c_mediumtext0         Rmysql_string_typesb  +mediumtextz c_mediumtext"9 +c_text0         Rmysql_string_typesbtextzc_text"E -c_longtext0         R  mysql_typesb +c_longtext0         Rmysql_string_typesb longtextz -c_longtext"2 -c_json0         R  mysql_typesbjsonzc_json"R -c_json_string_override0         R  mysql_typesbjsonzc_json_string_override"7 -c_enum0R  mysql_typesbmysql_types_c_enumzc_enum"4 -c_set0R  mysql_typesbmysql_types_c_setzc_set: query.sqlB -TRUNCATE TABLE mysql_typesTruncateMysqlTypes:exec: query.sql╚ +c_longtext"9 +c_json0         Rmysql_string_typesbjsonzc_json"Y +c_json_string_override0         Rmysql_string_typesbjsonzc_json_string_override"E +c_enum0Rmysql_string_typesbmysql_string_types_c_enumzc_enum"B +c_set0Rmysql_string_typesbmysql_string_types_c_setzc_set: query.sqlO +!TRUNCATE TABLE mysql_string_typesTruncateMysqlStringTypes:exec: query.sql╚ В INSERT INTO mysql_datetime_types ( @@ -534,13 +575,14 @@ mediumblobz c_mediumblob"E c_longblob0         Rmysql_binary_typesb longblobz c_longblob: query.sqlO -!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sql╜ -Ь +!TRUNCATE TABLE mysql_binary_typesTruncateMysqlBinaryTypes:exec: query.sqlу +┬ SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_types +FROM mysql_numeric_types +CROSS JOIN mysql_string_types CROSS JOIN 
mysql_datetime_typesGetMysqlFunctions:one" max_int0         @bany"# max_varchar0         @bany"% diff --git a/examples/config/mysql/types/query.sql b/examples/config/mysql/types/query.sql index a06ec5ba..de837a17 100644 --- a/examples/config/mysql/types/query.sql +++ b/examples/config/mysql/types/query.sql @@ -1,7 +1,7 @@ -/* Basic types */ +/* Numeric types */ --- name: InsertMysqlTypes :exec -INSERT INTO mysql_types +-- name: InsertMysqlNumericTypes :exec +INSERT INTO mysql_numeric_types ( c_bool, c_boolean, @@ -11,7 +11,85 @@ INSERT INTO mysql_types c_int, c_integer, c_bigint, - c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision, + c_decimal, + c_dec, + c_numeric, + c_fixed, + c_float, + c_double, + c_double_precision +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: InsertMysqlNumericTypesBatch :copyfrom +INSERT INTO mysql_numeric_types +( + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision +) +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); + +-- name: GetMysqlNumericTypes :one +SELECT * FROM mysql_numeric_types LIMIT 1; + +-- name: GetMysqlNumericTypesCnt :one +SELECT + COUNT(*) AS cnt, + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision +FROM mysql_numeric_types +GROUP BY + c_bool, + c_boolean, + c_tinyint, + c_smallint, + c_mediumint, + c_int, + c_integer, + c_bigint, + c_float, + c_numeric, + c_decimal, + c_dec, + c_fixed, + c_double, + c_double_precision +LIMIT 1; + +-- name: TruncateMysqlNumericTypes :exec +TRUNCATE TABLE mysql_numeric_types; + +/* String types */ + +-- name: InsertMysqlStringTypes :exec +INSERT INTO mysql_string_types +( c_char, c_nchar, c_national_char, @@ -25,20 +103,11 @@ INSERT INTO mysql_types c_enum, c_set ) 
-VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); --- name: InsertMysqlTypesBatch :copyfrom -INSERT INTO mysql_types +-- name: InsertMysqlStringTypesBatch :copyfrom +INSERT INTO mysql_string_types ( - c_bool, - c_boolean, - c_tinyint, - c_smallint, - c_mediumint, - c_int, - c_integer, - c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, @@ -52,29 +121,14 @@ INSERT INTO mysql_types c_enum, c_set ) -VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); +VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?); --- name: GetMysqlTypes :one -SELECT * FROM mysql_types LIMIT 1; +-- name: GetMysqlStringTypes :one +SELECT * FROM mysql_string_types LIMIT 1; --- name: GetMysqlTypesCnt :one +-- name: GetMysqlStringTypesCnt :one SELECT COUNT(*) AS cnt, - c_bool, - c_boolean, - c_tinyint, - c_smallint, - c_mediumint, - c_int, - c_integer, - c_bigint, - c_float, - c_numeric, - c_decimal, - c_dec, - c_fixed, - c_double, - c_double_precision, c_char, c_nchar, c_national_char, @@ -87,17 +141,8 @@ SELECT c_json_string_override, c_enum, c_set -FROM mysql_types +FROM mysql_string_types GROUP BY - c_bool, - c_boolean, - c_tinyint, - c_smallint, - c_mediumint, - c_int, - c_integer, - c_bigint, - c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision, c_char, c_nchar, c_national_char, @@ -112,8 +157,8 @@ GROUP BY c_set LIMIT 1; --- name: TruncateMysqlTypes :exec -TRUNCATE TABLE mysql_types; +-- name: TruncateMysqlStringTypes :exec +TRUNCATE TABLE mysql_string_types; /* Datetime types */ @@ -224,5 +269,6 @@ SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM mysql_types +FROM mysql_numeric_types +CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types; diff --git a/examples/config/mysql/types/schema.sql 
b/examples/config/mysql/types/schema.sql index 7a70ab09..e5755cf5 100644 --- a/examples/config/mysql/types/schema.sql +++ b/examples/config/mysql/types/schema.sql @@ -1,36 +1,34 @@ -CREATE TABLE mysql_types ( - /* Numeric data types */ - c_bool BOOL, - c_boolean BOOLEAN, - c_tinyint TINYINT(3), - c_smallint SMALLINT, - c_mediumint MEDIUMINT, - c_int INT, - c_integer INTEGER, - c_bigint BIGINT, - c_float FLOAT, - c_decimal DECIMAL(10,7), - c_dec DEC(10,7), - c_numeric NUMERIC(10,7), - c_fixed FIXED(10,7), - c_double DOUBLE, - c_double_precision DOUBLE PRECISION, - - /* String data types */ - c_char CHAR, - c_nchar NCHAR, - c_national_char NATIONAL CHAR, - c_varchar VARCHAR(100), - c_tinytext TINYTEXT, - c_mediumtext MEDIUMTEXT, - c_text TEXT, - c_longtext LONGTEXT, - c_json JSON, - c_json_string_override JSON, +CREATE TABLE mysql_numeric_types ( + c_bool BOOL, + c_boolean BOOLEAN, + c_tinyint TINYINT(3), + c_smallint SMALLINT, + c_mediumint MEDIUMINT, + c_int INT, + c_integer INTEGER, + c_bigint BIGINT, + c_float FLOAT, + c_decimal DECIMAL(10, 7), + c_dec DEC(10, 7), + c_numeric NUMERIC(10, 7), + c_fixed FIXED(10, 7), + c_double DOUBLE, + c_double_precision DOUBLE PRECISION +); - /* Pre-defined types */ - c_enum ENUM ('small', 'medium', 'big'), - c_set SET ('tea', 'coffee', 'milk') +CREATE TABLE mysql_string_types ( + c_char CHAR, + c_nchar NCHAR, + c_national_char NATIONAL CHAR, + c_varchar VARCHAR(100), + c_tinytext TINYTEXT, + c_mediumtext MEDIUMTEXT, + c_text TEXT, + c_longtext LONGTEXT, + c_json JSON, + c_json_string_override JSON, + c_enum ENUM ('small', 'medium', 'big'), + c_set SET ('tea', 'coffee', 'milk') ); CREATE TABLE mysql_datetime_types ( From e1171474ec8fd8dd92625295aabc703a2dbc8c1b Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Thu, 14 Aug 2025 16:53:29 +0200 Subject: [PATCH 12/33] fix: re-generate code --- .../MySqlConnectorDapperExample/request.json | 2 +- .../request.message | Bin 25756 -> 25750 bytes .../request.json | 2 +- .../request.message | 
16 ++++++++-------- examples/MySqlConnectorExample/request.json | 2 +- .../MySqlConnectorExample/request.message | Bin 25740 -> 25734 bytes .../MySqlConnectorLegacyExample/request.json | 2 +- .../request.message | 16 ++++++++-------- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index 2bbf9802..f8f7e330 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -2128,7 +2128,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\nLIMIT 1", + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nLIMIT 1", "name": "GetMysqlNumericTypesCnt", "cmd": ":one", "columns": [ diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 02755123bd9cc3901f98234c191ef6ef31f0692f..1864e1375ba0f3cc3cf3454948d4fdaa753a0b4d 100644 GIT binary patch delta 33 pcmbPpl5yHe#tn}RSsQq`elt&=5UaeI+o+Ura Date: Fri, 15 Aug 2025 01:04:28 +0200 Subject: [PATCH 13/33] fix: separate sqlite schema and query files by type and authors --- 
docs/ExamplesDocGen/Program.cs | 9 +- end2end/EndToEndCommon/EndToEndCommon.cs | 22 +- end2end/EndToEndCommon/EndToEndCommon.csproj | 10 - end2end/EndToEndTests/EndToEndTests.csproj | 8 +- .../EndToEndTestsLegacy.csproj | 8 +- examples/SqliteDapperExample/Models.cs | 14 +- examples/SqliteDapperExample/QuerySql.cs | 300 ++--- examples/SqliteDapperExample/request.json | 1130 +++++++++-------- examples/SqliteDapperExample/request.message | Bin 7813 -> 7906 bytes examples/SqliteDapperLegacyExample/Models.cs | 14 +- .../SqliteDapperLegacyExample/QuerySql.cs | 300 ++--- .../SqliteDapperLegacyExample/request.json | 1130 +++++++++-------- .../SqliteDapperLegacyExample/request.message | Bin 7847 -> 7940 bytes examples/SqliteExample/Models.cs | 4 +- examples/SqliteExample/QuerySql.cs | 512 ++++---- examples/SqliteExample/request.json | 1130 +++++++++-------- examples/SqliteExample/request.message | Bin 7797 -> 7890 bytes examples/SqliteLegacyExample/Models.cs | 14 +- examples/SqliteLegacyExample/QuerySql.cs | 752 +++++------ examples/SqliteLegacyExample/request.json | 1130 +++++++++-------- examples/SqliteLegacyExample/request.message | Bin 7831 -> 7924 bytes .../config/sqlite/{ => authors}/query.sql | 34 +- .../config/sqlite/{ => authors}/schema.sql | 9 +- examples/config/sqlite/types/query.sql | 31 + examples/config/sqlite/types/schema.sql | 6 + sqlc.ci.yaml | 16 +- sqlc.local.generated.yaml | 16 +- sqlc.request.generated.yaml | 16 +- 28 files changed, 3313 insertions(+), 3302 deletions(-) rename examples/config/sqlite/{ => authors}/query.sql (71%) rename examples/config/sqlite/{ => authors}/schema.sql (74%) create mode 100644 examples/config/sqlite/types/query.sql create mode 100644 examples/config/sqlite/types/schema.sql diff --git a/docs/ExamplesDocGen/Program.cs b/docs/ExamplesDocGen/Program.cs index ce94ec83..2538b48d 100644 --- a/docs/ExamplesDocGen/Program.cs +++ b/docs/ExamplesDocGen/Program.cs @@ -26,11 +26,10 @@ public static void Main() private static string 
ParseConfigNode(YamlNode node) { var item = (YamlMappingNode)node; - var queryFiles = item["queries"].ToString(); var codegenArray = (YamlSequenceNode)item["codegen"]; - var firstCodegenObj = (YamlMappingNode)codegenArray.Children[0]; + var codegenObj = (YamlMappingNode)codegenArray.Children[0]; - var outputDirectory = firstCodegenObj["out"].ToString(); + var outputDirectory = codegenObj["out"].ToString(); var projectName = outputDirectory.Replace("examples/", ""); var testProject = projectName.Contains("Legacy") ? "EndToEndTestsLegacy" : "EndToEndTests"; var testClassName = projectName.Replace("Example", "Tester"); @@ -38,7 +37,7 @@ private static string ParseConfigNode(YamlNode node) testClassName = testClassName.Replace("Legacy", ""); var yamlStream = new YamlStream(); - var yamlDocument = new YamlDocument(firstCodegenObj["options"]); + var yamlDocument = new YamlDocument(codegenObj["options"]); yamlStream.Documents.Add(yamlDocument); using var optionsWriter = new StringWriter(); yamlStream.Save(optionsWriter, false); @@ -49,7 +48,7 @@ private static string ParseConfigNode(YamlNode node) {projectName.Replace("Example", "")} ## Engine `{item["engine"]}`: [{projectName}]({outputDirectory}) - ### [Schema]({item["schema"]}) | [Queries]({queryFiles}) | [End2End Test](end2end/{testProject}/{testClassName}.cs) + ### [Schema]({item["schema"][0]}) | [Queries]({item["queries"][0]}) | [End2End Test](end2end/{testProject}/{testClassName}.cs) ### Config ```yaml {optionsStr}``` diff --git a/end2end/EndToEndCommon/EndToEndCommon.cs b/end2end/EndToEndCommon/EndToEndCommon.cs index d926542e..d2866625 100644 --- a/end2end/EndToEndCommon/EndToEndCommon.cs +++ b/end2end/EndToEndCommon/EndToEndCommon.cs @@ -9,7 +9,10 @@ namespace EndToEndTests public static class EndToEndCommon { private const string EnvFile = ".env"; - private const string SchemaFile = "sqlite.schema.sql"; + private static readonly string[] SchemaFiles = new string[] { + "authors.sqlite.schema.sql", + 
"types.sqlite.schema.sql" + }; public const string PostgresConnectionStringEnv = "POSTGRES_CONNECTION_STRING"; public const string MySqlConnectionStringEnv = "MYSQL_CONNECTION_STRING"; @@ -20,8 +23,7 @@ public static void SetUp() if (File.Exists(EnvFile)) DotEnv.Load(options: new DotEnvOptions(envFilePaths: new[] { EnvFile })); RemoveExistingSqliteDb(); - if (File.Exists(SchemaFile)) - InitSqliteDb(); + InitSqliteDb(); } public static void TearDown() @@ -49,15 +51,21 @@ private static void RemoveExistingSqliteDb() } private static void InitSqliteDb() { - var schemaSql = File.ReadAllText(SchemaFile); var connectionString = Environment.GetEnvironmentVariable(EndToEndCommon.SqliteConnectionStringEnv); using (var connection = new SqliteConnection(connectionString)) { connection.Open(); - using (var command = connection.CreateCommand()) + foreach (var schemaFile in SchemaFiles) { - command.CommandText = schemaSql; - command.ExecuteNonQuery(); + if (!File.Exists(schemaFile)) + continue; + + var schemaSql = File.ReadAllText(schemaFile); + using (var command = connection.CreateCommand()) + { + command.CommandText = schemaSql; + command.ExecuteNonQuery(); + } } } } diff --git a/end2end/EndToEndCommon/EndToEndCommon.csproj b/end2end/EndToEndCommon/EndToEndCommon.csproj index 1883febe..e254c45c 100644 --- a/end2end/EndToEndCommon/EndToEndCommon.csproj +++ b/end2end/EndToEndCommon/EndToEndCommon.csproj @@ -5,16 +5,6 @@ EndToEndTests - - - Always - - - Always - sqlite.schema.sql - - - diff --git a/end2end/EndToEndTests/EndToEndTests.csproj b/end2end/EndToEndTests/EndToEndTests.csproj index babe0a3c..a8a8e10f 100644 --- a/end2end/EndToEndTests/EndToEndTests.csproj +++ b/end2end/EndToEndTests/EndToEndTests.csproj @@ -10,9 +10,13 @@ Always - + Always - sqlite.schema.sql + authors.sqlite.schema.sql + + + Always + types.sqlite.schema.sql diff --git a/end2end/EndToEndTestsLegacy/EndToEndTestsLegacy.csproj b/end2end/EndToEndTestsLegacy/EndToEndTestsLegacy.csproj index 
c8c384b9..c20bbe10 100644 --- a/end2end/EndToEndTestsLegacy/EndToEndTestsLegacy.csproj +++ b/end2end/EndToEndTestsLegacy/EndToEndTestsLegacy.csproj @@ -9,9 +9,13 @@ Always - + Always - sqlite.schema.sql + authors.sqlite.schema.sql + + + Always + types.sqlite.schema.sql diff --git a/examples/SqliteDapperExample/Models.cs b/examples/SqliteDapperExample/Models.cs index 8518a279..1d1baaed 100644 --- a/examples/SqliteDapperExample/Models.cs +++ b/examples/SqliteDapperExample/Models.cs @@ -2,6 +2,13 @@ using System.Linq; namespace SqliteDapperExampleGen; +public class TypesSqlite +{ + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? CBlob { get; init; } +}; public class Author { public required int Id { get; init; } @@ -14,11 +21,4 @@ public class Book public required string Name { get; init; } public required int AuthorId { get; init; } public string? Description { get; init; } -}; -public class TypesSqlite -{ - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? CBlob { get; init; } }; \ No newline at end of file diff --git a/examples/SqliteDapperExample/QuerySql.cs b/examples/SqliteDapperExample/QuerySql.cs index 5ada63bb..5f98a5fb 100644 --- a/examples/SqliteDapperExample/QuerySql.cs +++ b/examples/SqliteDapperExample/QuerySql.cs @@ -37,6 +37,156 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction? Transaction { get; } private string? ConnectionString { get; } + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public class InsertSqliteTypesArgs + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? 
CBlob { get; init; } + }; + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_blob", args.CBlob); + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public class InsertSqliteTypesBatchArgs + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + }; + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? 
(object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? CBlob { get; init; } + }; + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? 
CBlob { get; init; } + public required int Cnt { get; init; } + }; + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow + { + public int? MaxInteger { get; init; } + public required decimal MaxReal { get; init; } + public object? MaxText { get; init; } + }; + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(DeleteAllSqliteTypesSql); + return; + } + + if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -529,154 +679,4 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; - public class InsertSqliteTypesArgs - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? CBlob { get; init; } - }; - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_blob", args.CBlob); - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public class 
InsertSqliteTypesBatchArgs - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? 
CBlob { get; init; } - }; - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? CBlob { get; init; } - public required int Cnt { get; init; } - }; - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow - { - public int? MaxInteger { get; init; } - public required decimal MaxReal { get; init; } - public object? 
MaxText { get; init; } - }; - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); - } } \ No newline at end of file diff --git a/examples/SqliteDapperExample/request.json b/examples/SqliteDapperExample/request.json index 89eef44a..58364ca3 100644 --- a/examples/SqliteDapperExample/request.json +++ b/examples/SqliteDapperExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/schema.sql" + "examples/config/sqlite/types/schema.sql", + "examples/config/sqlite/authors/schema.sql" ], "queries": [ - "examples/config/sqlite/query.sql" + "examples/config/sqlite/types/query.sql", + "examples/config/sqlite/authors/query.sql" ], "codegen": { "out": "examples/SqliteDapperExample", @@ -25,46 +27,54 @@ "tables": [ { "rel": { - "name": "authors" + "name": 
"types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" } + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + } } ] }, { "rel": { - "name": "books" + "name": "authors" }, "columns": [ { @@ -72,7 +82,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -83,28 +93,17 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - } - }, - { - "name": "description", + "name": "bio", "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -114,47 +113,50 @@ }, { "rel": { - "name": "types_sqlite" + "name": "books" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" } }, { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" } }, { - "name": "c_blob", + "name": "description", "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": 
{ - "name": "BLOB" + "name": "TEXT" } } ] @@ -165,467 +167,287 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "number": 2, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "main", + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, 
"column": { - "name": "offset", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "TEXT" + }, + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "limit", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "BLOB" + }, + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "types_sqlite" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - } + "originalName": "c_integer" }, { - "number": 2, - "column": { - "name": "bio", - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "originalName": "id" + "originalName": "c_real" }, { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "name" + "originalName": "c_text" }, { - "name": "bio", + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" - } - ], - "parameters": [ + "originalName": "c_text" + }, { - 
"number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" }, { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "max_integer", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "INTEGER" - }, - "originalName": "id" + "name": "any" + } }, { - "name": "name", - "notNull": true, + "name": "max_real", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" + } }, { - "name": "bio", + "name": "max_text", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" } } ], "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ - { - "number": 1, - "column": { - "name": "bio", 
- "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", + "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", "columns": [ { "name": "id", @@ -667,26 +489,24 @@ { "number": 1, "column": { - "name": "ids", + "name": "name", "notNull": true, "length": -1, - "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", "cmd": ":many", "columns": [ { @@ -729,52 +549,59 @@ { "number": 1, "column": { - "name": "ids", + "name": "offset", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "INTEGER" - }, - "isSqlcSlice": true, - "originalName": "id" + "name": "integer" + } } }, { "number": 2, "column": { - "name": "names", + "name": "limit", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "TEXT" - }, - "isSqlcSlice": true, - "originalName": "name" + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", "cmd": ":exec", "parameters": [ { "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, 
+ "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, "column": { "name": "name", "notNull": true, "length": -1, "table": { + "schema": "main", "name": "authors" }, "type": { @@ -782,13 +609,31 @@ }, "originalName": "name" } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } }, { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", + "name": "CreateAuthorReturnId", "cmd": ":execlastid", "columns": [ { @@ -796,7 +641,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -813,7 +658,7 @@ "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -824,76 +669,159 @@ { "number": 2, "column": { - "name": "author_id", - "notNull": true, + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "authors" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "books", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "authors", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + 
"isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", "cmd": ":many", "columns": [ { @@ -930,25 +858,17 @@ "name": "TEXT" }, "originalName": "bio" - }, - { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } } ], "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -960,286 +880,368 @@ "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (\n ?, ?, ?, ?\n)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - 
"name": "c_real", + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "isSqlcSlice": true, + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 3, + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 4, + "number": 2, "column": { - "name": "c_blob", + "name": "names", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - 
"schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "isSqlcSlice": true, + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "number": 2, + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { - "number": 3, + "number": 2, "column": { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "books" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - 
"name": "GetSqliteTypes", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "c_integer" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_real", + "name": "books", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_text", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": 
"GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" - }, - { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" }, { - "name": "cnt", - "notNull": true, + "name": "books", "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "type": {}, + "embedTable": { + "name": "books" } } ], - "filename": "query.sql" - }, - { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", - "columns": [ - { - "name": "max_integer", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, - { - "name": "max_real", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, + "parameters": [ { - "name": "max_text", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", "cmd": ":exec", "filename": "query.sql" } diff --git 
a/examples/SqliteDapperExample/request.message b/examples/SqliteDapperExample/request.message index b774d0fe7120f2830bbdcfcf6a69455b93bcea1d..7c62d128b4a74e3b101037dd360382479840549a 100644 GIT binary patch delta 196 zcmZp*ePk=Z)z8YsXe7i|T$qztk}9P>Q9xX%q_QBjSid+qBQ-Zs4=5p}2^1AcEG@~% zFG3bqQUi($LR1%)rWRF#6f0=}1=*pBH+ueNoLt3xcC!nU8KYbuGgpwOU$ARXh=QkI zh`$0@XFSAa1+K|+MOB2k!W?}~$2NX!A|cvuvzVy-ZvdllO5dPVVF9-(1bY LBDi^(%zZ`xDQG?= delta 116 zcmaE4+iEMowT^|0(MX7`xG*QPBvndrqJTJ0adJj#ZlWGgNJ#<6-?;i0(); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_blob", args.CBlob); + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public class InsertSqliteTypesBatchArgs + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + }; + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow + { + public int? CInteger { get; set; } + public decimal? 
CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + public int Cnt { get; set; } + }; + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow + { + public int? MaxInteger { get; set; } + public decimal MaxReal { get; set; } + public object MaxText { get; set; } + }; + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(DeleteAllSqliteTypesSql); + return; + } + + if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -530,155 +680,5 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } - - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; - public class InsertSqliteTypesArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_blob", args.CBlob); - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; 
- public class InsertSqliteTypesBatchArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow - { - public int? CInteger { get; set; } - public decimal? 
CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - public int Cnt { get; set; } - }; - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow - { - public int? 
MaxInteger { get; set; } - public decimal MaxReal { get; set; } - public object MaxText { get; set; } - }; - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); - } } } \ No newline at end of file diff --git a/examples/SqliteDapperLegacyExample/request.json b/examples/SqliteDapperLegacyExample/request.json index d7a8b08b..859f4fa1 100644 --- a/examples/SqliteDapperLegacyExample/request.json +++ b/examples/SqliteDapperLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/schema.sql" + "examples/config/sqlite/types/schema.sql", + "examples/config/sqlite/authors/schema.sql" ], "queries": [ - "examples/config/sqlite/query.sql" + "examples/config/sqlite/types/query.sql", + "examples/config/sqlite/authors/query.sql" ], "codegen": { "out": 
"examples/SqliteDapperLegacyExample", @@ -25,46 +27,54 @@ "tables": [ { "rel": { - "name": "authors" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" } + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + } } ] }, { "rel": { - "name": "books" + "name": "authors" }, "columns": [ { @@ -72,7 +82,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -83,28 +93,17 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - } - }, - { - "name": "description", + "name": "bio", "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -114,47 +113,50 @@ }, { "rel": { - "name": "types_sqlite" + "name": "books" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" } }, { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" } }, { - "name": 
"c_blob", + "name": "description", "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] @@ -165,467 +167,287 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "number": 2, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "main", + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - 
"type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "offset", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "TEXT" + }, + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "limit", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "BLOB" + }, + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "types_sqlite" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - } + "originalName": "c_integer" }, { - "number": 2, - "column": { - "name": "bio", - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "originalName": "id" + "originalName": "c_real" }, { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "name" + "originalName": "c_text" }, { - "name": "bio", + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" - } - ], - "parameters": [ + "originalName": "c_text" + }, { - 
"number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" }, { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "max_integer", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "INTEGER" - }, - "originalName": "id" + "name": "any" + } }, { - "name": "name", - "notNull": true, + "name": "max_real", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" + } }, { - "name": "bio", + "name": "max_text", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" } } ], "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ - { - "number": 1, - "column": { - "name": "bio", 
- "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", + "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", "columns": [ { "name": "id", @@ -667,26 +489,24 @@ { "number": 1, "column": { - "name": "ids", + "name": "name", "notNull": true, "length": -1, - "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", "cmd": ":many", "columns": [ { @@ -729,52 +549,59 @@ { "number": 1, "column": { - "name": "ids", + "name": "offset", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "INTEGER" - }, - "isSqlcSlice": true, - "originalName": "id" + "name": "integer" + } } }, { "number": 2, "column": { - "name": "names", + "name": "limit", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "TEXT" - }, - "isSqlcSlice": true, - "originalName": "name" + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", "cmd": ":exec", "parameters": [ { "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, 
+ "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, "column": { "name": "name", "notNull": true, "length": -1, "table": { + "schema": "main", "name": "authors" }, "type": { @@ -782,13 +609,31 @@ }, "originalName": "name" } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } }, { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", + "name": "CreateAuthorReturnId", "cmd": ":execlastid", "columns": [ { @@ -796,7 +641,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -813,7 +658,7 @@ "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -824,76 +669,159 @@ { "number": 2, "column": { - "name": "author_id", - "notNull": true, + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "authors" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "books", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "authors", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + 
"isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", "cmd": ":many", "columns": [ { @@ -930,25 +858,17 @@ "name": "TEXT" }, "originalName": "bio" - }, - { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } } ], "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -960,286 +880,368 @@ "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (\n ?, ?, ?, ?\n)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - 
"name": "c_real", + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "isSqlcSlice": true, + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 3, + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 4, + "number": 2, "column": { - "name": "c_blob", + "name": "names", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - 
"schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "isSqlcSlice": true, + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "number": 2, + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { - "number": 3, + "number": 2, "column": { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "books" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - 
"name": "GetSqliteTypes", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "c_integer" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_real", + "name": "books", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_text", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": 
"GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" - }, - { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" }, { - "name": "cnt", - "notNull": true, + "name": "books", "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "type": {}, + "embedTable": { + "name": "books" } } ], - "filename": "query.sql" - }, - { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", - "columns": [ - { - "name": "max_integer", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, - { - "name": "max_real", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, + "parameters": [ { - "name": "max_text", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", "cmd": ":exec", "filename": "query.sql" } diff --git 
a/examples/SqliteDapperLegacyExample/request.message b/examples/SqliteDapperLegacyExample/request.message index 8b455052ef1ebc59c9f636ba775b1efed2dc62ca..a304f552c55c5f6cdf914e0bdd1e63bb5afd4d4e 100644 GIT binary patch delta 172 zcmZ2(+hQlcwV0KQ(MX7`xG*QPBvnd%qJX$iNo7H5v3_xKMrv-N9#BF`6DTT@SXz>i zUxX~Kqy`ihgs3hoO)aVfDOS<|3bI2LZ}jA6n%u{Hc5?)i86!&{Ggr`LU2#c9uE`rk uRfM_19DPDvgB3LFbrhgz^IOrgY^+kfOk5U|4Y_qU_pq=FZr&yHmJtB^*fbpg delta 109 zcmZp%TW%}Bb%=$F(MX7`xG*QPBvndrqJTJ0adJj#ZlWGgNJ#<6-?*BOX>%L18RO)g zOlLRW=e7`Jm72uN6*f7L&3N*Db`B{`g)m2-P}g7u4K4*Bu-AbhuFZn1XW2H3%f4j< E0KvT;C;$Ke diff --git a/examples/SqliteExample/Models.cs b/examples/SqliteExample/Models.cs index e266ada0..54c720b5 100644 --- a/examples/SqliteExample/Models.cs +++ b/examples/SqliteExample/Models.cs @@ -2,6 +2,6 @@ using System.Linq; namespace SqliteExampleGen; +public readonly record struct TypesSqlite(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); public readonly record struct Author(int Id, string Name, string? Bio); -public readonly record struct Book(int Id, string Name, int AuthorId, string? Description); -public readonly record struct TypesSqlite(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); \ No newline at end of file +public readonly record struct Book(int Id, string Name, int AuthorId, string? Description); \ No newline at end of file diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index c9d48511..a9f84af4 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -34,6 +34,262 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction? Transaction { get; } private string? ConnectionString { get; } + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? 
CText, byte[]? CBlob); + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertSqliteTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public readonly record struct InsertSqliteTypesBatchArgs(int? CInteger, decimal? CReal, string? 
CText); + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public readonly record struct GetSqliteTypesRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } + } + } + + return null; + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public readonly record struct GetSqliteTypesCntRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob, int Cnt); + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + + return null; + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? MaxText); + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteFunctionsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) + }; + } + } + } + + return null; + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAllSqliteTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public readonly record struct GetAuthorRow(int Id, string Name, string? 
Bio); public readonly record struct GetAuthorArgs(string Name); @@ -690,260 +946,4 @@ public async Task DeleteAllAuthors() await command.ExecuteNonQueryAsync(); } } - - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; - public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) - { - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertSqliteTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public readonly record struct InsertSqliteTypesBatchArgs(int? CInteger, decimal? CReal, string? CText); - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public readonly record struct GetSqliteTypesRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } - } - } - - return null; - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public readonly record struct GetSqliteTypesCntRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob, int Cnt); - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - - return null; - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? MaxText); - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteFunctionsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) - }; - } - } - } - - return null; - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = DeleteAllSqliteTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } } \ No newline at end of file diff --git a/examples/SqliteExample/request.json b/examples/SqliteExample/request.json index 14a1c32d..a150f65e 100644 --- a/examples/SqliteExample/request.json +++ b/examples/SqliteExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/schema.sql" + 
"examples/config/sqlite/types/schema.sql", + "examples/config/sqlite/authors/schema.sql" ], "queries": [ - "examples/config/sqlite/query.sql" + "examples/config/sqlite/types/query.sql", + "examples/config/sqlite/authors/query.sql" ], "codegen": { "out": "examples/SqliteExample", @@ -25,46 +27,54 @@ "tables": [ { "rel": { - "name": "authors" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" } + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + } } ] }, { "rel": { - "name": "books" + "name": "authors" }, "columns": [ { @@ -72,7 +82,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -83,28 +93,17 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - } - }, - { - "name": "description", + "name": "bio", "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -114,47 +113,50 @@ }, { "rel": { - "name": "types_sqlite" + "name": "books" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { 
- "name": "REAL" + "name": "TEXT" } }, { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" } }, { - "name": "c_blob", + "name": "description", "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] @@ -165,467 +167,287 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "number": 2, + "column": { + 
"name": "c_real", + "length": -1, + "table": { + "schema": "main", + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "offset", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "TEXT" + }, + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "limit", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "BLOB" + }, + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { 
"name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "types_sqlite" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - } + "originalName": "c_integer" }, { - "number": 2, - "column": { - "name": "bio", - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "originalName": "id" + "originalName": "c_real" }, { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "name" + "originalName": "c_text" }, { - "name": "bio", + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" - } - ], - "parameters": [ + "originalName": "c_text" + }, { - 
"number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" }, { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "max_integer", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "INTEGER" - }, - "originalName": "id" + "name": "any" + } }, { - "name": "name", - "notNull": true, + "name": "max_real", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" + } }, { - "name": "bio", + "name": "max_text", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" } } ], "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ - { - "number": 1, - "column": { - "name": "bio", 
- "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", + "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", "columns": [ { "name": "id", @@ -667,26 +489,24 @@ { "number": 1, "column": { - "name": "ids", + "name": "name", "notNull": true, "length": -1, - "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", "cmd": ":many", "columns": [ { @@ -729,52 +549,59 @@ { "number": 1, "column": { - "name": "ids", + "name": "offset", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "INTEGER" - }, - "isSqlcSlice": true, - "originalName": "id" + "name": "integer" + } } }, { "number": 2, "column": { - "name": "names", + "name": "limit", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "TEXT" - }, - "isSqlcSlice": true, - "originalName": "name" + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", "cmd": ":exec", "parameters": [ { "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, 
+ "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, "column": { "name": "name", "notNull": true, "length": -1, "table": { + "schema": "main", "name": "authors" }, "type": { @@ -782,13 +609,31 @@ }, "originalName": "name" } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } }, { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", + "name": "CreateAuthorReturnId", "cmd": ":execlastid", "columns": [ { @@ -796,7 +641,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -813,7 +658,7 @@ "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -824,76 +669,159 @@ { "number": 2, "column": { - "name": "author_id", - "notNull": true, + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "authors" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "books", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "authors", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + 
"isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", "cmd": ":many", "columns": [ { @@ -930,25 +858,17 @@ "name": "TEXT" }, "originalName": "bio" - }, - { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } } ], "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -960,286 +880,368 @@ "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (\n ?, ?, ?, ?\n)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - 
"name": "c_real", + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "isSqlcSlice": true, + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 3, + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 4, + "number": 2, "column": { - "name": "c_blob", + "name": "names", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - 
"schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "isSqlcSlice": true, + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "number": 2, + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { - "number": 3, + "number": 2, "column": { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "books" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - 
"name": "GetSqliteTypes", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "c_integer" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_real", + "name": "books", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_text", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": 
"GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" - }, - { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" }, { - "name": "cnt", - "notNull": true, + "name": "books", "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "type": {}, + "embedTable": { + "name": "books" } } ], - "filename": "query.sql" - }, - { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", - "columns": [ - { - "name": "max_integer", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, - { - "name": "max_real", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, + "parameters": [ { - "name": "max_text", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", "cmd": ":exec", "filename": "query.sql" } diff --git 
a/examples/SqliteExample/request.message b/examples/SqliteExample/request.message index 84b7f159f4c4f4dfc0df481b799c3ae9ccdc3830..c197954552acfebf6c6b697ae8f2e1762b42d2f4 100644 GIT binary patch delta 175 zcmexrbIDeKtA&+|(MX7`xG*QPBvnd%qJX$iNo7H5v3_xKMrv-N9#BF`6DTT@SXz>i zUxX~Kqy`ihgs3hoO)aVfDOS<|3bI2LZ}j}YI60sB>}E?QGe(v^X0D*geBzQ!TpE+P xr4&Uq6}ZA2eL`J>6*TO16rgDHInlFhtWv#9To#ir@+fXjWDyqJJX_`#BLL^iH4p#* delta 113 zcmca)`_)E(YcUHKqmd9>abZqoNvf3ML;-Q0;^d6f+(bQ~kdgwBzj5_@#?9%>W{i_( zF`eCffZIZlRcaD5SJ>o*ERvJG*`y{fVCRt1R0wnQ33Uxt(BM)40(%`8;@bR) args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? 
(decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } + } + } + + return null; + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + public int Cnt { get; set; } + }; + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? 
null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + + return null; + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow + { + public int? MaxInteger { get; set; } + public decimal MaxReal { get; set; } + public object MaxText { get; set; } + }; + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + { + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + } + + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteFunctionsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) + }; + } + } + } + + return null; + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAllSqliteTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -558,224 +843,22 @@ public async Task> GetAuthorsByIdsAndNames(GetA } } } - - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs - { - public string Name { get; set; } 
- }; - public async Task DeleteAuthor(DeleteAuthorArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAuthorSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = DeleteAuthorSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - await command.ExecuteNonQueryAsync(); - } - } - - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public int Id { get; set; } - }; - public class CreateBookArgs - { - public string Name { get; set; } - public int AuthorId { get; set; } - }; - public async Task CreateBook(CreateBookArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(CreateBookSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt32(result); - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateBookSql; - 
command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt32(result); - } - } - - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow - { - public Author Author { get; set; } - public Book Book { get; set; } - }; - public async Task> ListAllAuthorsBooks() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(ListAllAuthorsBooksSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } - } - - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow - { - public Author Author { get; set; } - public Author Author2 { get; set; } - }; - public async Task> GetDuplicateAuthors() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetDuplicateAuthorsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } - } - } - - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public int Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs - { - public string Name { get; set; } - }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetAuthorsByBookNameSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = 
reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } - } - - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllAuthorsSql, connection)) + using (var command = new SqliteCommand(DeleteAuthorSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -787,279 +870,196 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAllAuthorsSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES ( @c_integer, @c_real, @c_text, @c_blob )"; - public class InsertSqliteTypesArgs + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow { - public int? CInteger { get; set; } - public decimal? 
CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } + public int Id { get; set; } }; - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + public class CreateBookArgs + { + public string Name { get; set; } + public int AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) + using (var command = new SqliteCommand(CreateBookSql, connection)) { - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt32(result); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertSqliteTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public class InsertSqliteTypesBatchArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt32(result); } } - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow { - public int? CInteger { get; set; } - public decimal? 
CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } + public Author Author { get; set; } + public Book Book { get; set; } }; - public async Task GetSqliteTypes() + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) + using (var command = new SqliteCommand(ListAllAuthorsBooksSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetSqliteTypesSql; + command.CommandText = ListAllAuthorsBooksSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; } } - - return null; } - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public int? 
CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - public int Cnt { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetSqliteTypesCnt() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) + using (var command = new SqliteCommand(GetDuplicateAuthorsSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetSqliteTypesCntSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public int? 
MaxInteger { get; set; } - public decimal MaxReal { get; set; } - public object MaxText { get; set; } + public int Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } }; - public async Task GetSqliteFunctions() + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) + using (var command = new SqliteCommand(GetAuthorsByBookNameSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetSqliteFunctionsSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; } } - - return null; } - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) + using (var command = new SqliteCommand(DeleteAllAuthorsSql, connection)) { await command.ExecuteNonQueryAsync(); } @@ -1072,7 +1072,7 @@ public async Task DeleteAllSqliteTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAllSqliteTypesSql; + command.CommandText = DeleteAllAuthorsSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } diff --git a/examples/SqliteLegacyExample/request.json b/examples/SqliteLegacyExample/request.json index b2a9fb66..700c30b4 100644 --- a/examples/SqliteLegacyExample/request.json +++ b/examples/SqliteLegacyExample/request.json @@ -3,10 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/schema.sql" + "examples/config/sqlite/types/schema.sql", + "examples/config/sqlite/authors/schema.sql" ], "queries": [ - "examples/config/sqlite/query.sql" + "examples/config/sqlite/types/query.sql", + "examples/config/sqlite/authors/query.sql" ], "codegen": { "out": "examples/SqliteLegacyExample", @@ -25,46 +27,54 @@ "tables": [ { "rel": { - "name": "authors" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - 
"name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" } + }, + { + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + } } ] }, { "rel": { - "name": "books" + "name": "authors" }, "columns": [ { @@ -72,7 +82,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -83,28 +93,17 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - } - }, - { - "name": "description", + "name": "bio", "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -114,47 +113,50 @@ }, { "rel": { - "name": "types_sqlite" + "name": "books" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" } }, { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" } }, { - "name": "c_blob", + "name": "description", "length": -1, "table": { - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] @@ -165,467 +167,287 @@ }, "queries": [ { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? 
LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "number": 2, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "main", + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "offset", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + 
"name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "TEXT" + }, + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "limit", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, + "table": { + "schema": "main", + "name": "types_sqlite" + }, "type": { - "name": "integer" - } + "name": "BLOB" + }, + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "types_sqlite" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - } + "originalName": "c_integer" }, { - "number": 2, - "column": { - "name": "bio", - "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "originalName": "id" + "originalName": "c_real" }, { - "name": "name", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "name" + "originalName": "c_text" }, { - "name": "bio", + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" - } - ], - "parameters": [ + "originalName": "c_text" + }, { - 
"number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } + "name": "c_blob", + "length": -1, + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" }, { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "max_integer", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "INTEGER" - }, - "originalName": "id" + "name": "any" + } }, { - "name": "name", - "notNull": true, + "name": "max_real", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" + } }, { - "name": "bio", + "name": "max_text", "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "any" } } ], "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ - { - "number": 1, - "column": { - "name": "bio", 
- "length": -1, - "table": { - "schema": "main", - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - } - ], + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", + "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", "columns": [ { "name": "id", @@ -667,26 +489,24 @@ { "number": 1, "column": { - "name": "ids", + "name": "name", "notNull": true, "length": -1, - "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", "cmd": ":many", "columns": [ { @@ -729,52 +549,59 @@ { "number": 1, "column": { - "name": "ids", + "name": "offset", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "INTEGER" - }, - "isSqlcSlice": true, - "originalName": "id" + "name": "integer" + } } }, { "number": 2, "column": { - "name": "names", + "name": "limit", "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "name": "authors" - }, "type": { - "name": "TEXT" - }, - "isSqlcSlice": true, - "originalName": "name" + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", "cmd": ":exec", "parameters": [ { "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, 
+ "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, "column": { "name": "name", "notNull": true, "length": -1, "table": { + "schema": "main", "name": "authors" }, "type": { @@ -782,13 +609,31 @@ }, "originalName": "name" } + }, + { + "number": 3, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } }, { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", + "name": "CreateAuthorReturnId", "cmd": ":execlastid", "columns": [ { @@ -796,7 +641,7 @@ "notNull": true, "length": -1, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "INTEGER" @@ -813,7 +658,7 @@ "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -824,76 +669,159 @@ { "number": 2, "column": { - "name": "author_id", - "notNull": true, + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "authors" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "books", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "id", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { "name": "authors" - } + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "name": "authors", + "name": "name", + "notNull": true, "length": -1, - "type": {}, - "embedTable": { + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + 
"isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + }, + { + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", "cmd": ":many", "columns": [ { @@ -930,25 +858,17 @@ "name": "TEXT" }, "originalName": "bio" - }, - { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } } ], "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "name": "books" + "name": "authors" }, "type": { "name": "TEXT" @@ -960,286 +880,368 @@ "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (\n ?, ?, ?, ?\n)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - 
"name": "c_real", + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "isSqlcSlice": true, + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 3, + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 4, + "number": 2, "column": { - "name": "c_blob", + "name": "names", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - 
"schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "isSqlcSlice": true, + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "number": 2, + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { - "number": 3, + "number": 2, "column": { - "name": "c_text", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "c_text" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "books" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - 
"name": "GetSqliteTypes", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "c_integer" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_real", + "name": "books", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_text", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_blob", + "name": "authors", "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "type": {}, + "embedTable": { + "name": "authors" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": 
"GetAuthorsByBookName", + "cmd": ":many", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" - }, - { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" }, { - "name": "cnt", - "notNull": true, + "name": "books", "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "type": {}, + "embedTable": { + "name": "books" } } ], - "filename": "query.sql" - }, - { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", - "columns": [ - { - "name": "max_integer", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, - { - "name": "max_real", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" - } - }, + "parameters": [ { - "name": "max_text", - "length": -1, - "isFuncCall": true, - "type": { - "name": "any" + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", "cmd": ":exec", "filename": "query.sql" } diff --git 
a/examples/SqliteLegacyExample/request.message b/examples/SqliteLegacyExample/request.message index d657086028151a8fce75f68b9ef0951fe59b5f4a..98e282573a50f118c0cec4f89d307ca6fded8f92 100644 GIT binary patch delta 165 zcmbPk`^8p(YX&P9qmd9>abZqoNvf3kL;-Q3lFEYAV*TRejMUsjJ)ne?CQwu)v9u&3 zzX(}eNew722vJ>Fnp#u|Qmmu_6l8}g-ss7~G`X4i>}FpkGe(v^X0D*gy`qv#TpE+* nr4&Uq6}ZA2eL`J>6*TO16rgDHBhj;LoBdfV1vjskdBF$(ywNg% delta 122 zcmexjJKa`*YdZ@Uqmd9>abZqoNvf3ML;-Q0;^d6f+(bQ~kdgwBzi~A))8=YsGsem5 zn9j2FGI3c<_7+r}EG^EznS+N#kX33DGgsJTH#XzRF`OJynhIf#KB2C`3L0DrKwz%} RLtLBLSkJO;=8=8D2mlscAA Date: Fri, 15 Aug 2025 11:34:39 +0200 Subject: [PATCH 14/33] fix: extract common transaction exception throw to a constant --- Drivers/DbDriver.cs | 7 +- Drivers/Generators/ExecDeclareGen.cs | 8 +-- Drivers/Generators/ExecLastIdDeclareGen.cs | 7 +- Drivers/Generators/ExecRowsDeclareGen.cs | 8 +-- Drivers/Generators/ManyDeclareGen.cs | 8 +-- Drivers/Generators/OneDeclareGen.cs | 12 +--- .../MySqlConnectorDapperExample/QuerySql.cs | 60 ++++------------- .../QuerySql.cs | 60 ++++------------- examples/MySqlConnectorExample/QuerySql.cs | 64 ++++--------------- .../MySqlConnectorLegacyExample/QuerySql.cs | 64 ++++--------------- examples/NpgsqlDapperExample/QuerySql.cs | 50 +++------------ .../NpgsqlDapperLegacyExample/QuerySql.cs | 50 +++------------ examples/NpgsqlExample/QuerySql.cs | 54 ++++------------ examples/NpgsqlLegacyExample/QuerySql.cs | 54 ++++------------ examples/SqliteDapperExample/QuerySql.cs | 30 ++------- .../SqliteDapperLegacyExample/QuerySql.cs | 30 ++------- examples/SqliteExample/QuerySql.cs | 34 +++------- examples/SqliteLegacyExample/QuerySql.cs | 34 +++------- 18 files changed, 140 insertions(+), 494 deletions(-) diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 900192dc..4f58b6dd 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -55,7 +55,6 @@ public abstract class DbDriver "NpgsqlCidr", ]; - public abstract Dictionary ColumnMappings { get; } protected const 
string JsonElementTypeHandler = @@ -84,6 +83,12 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz } """; + public readonly string TransactionConnectionNullExcetionThrow = + $""" + if (this.{Variable.Transaction.AsPropertyName()}?.Connection == null || this.{Variable.Transaction.AsPropertyName()}?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + """; + protected DbDriver( Options options, string defaultSchema, diff --git a/Drivers/Generators/ExecDeclareGen.cs b/Drivers/Generators/ExecDeclareGen.cs index d2eee37c..2470457d 100644 --- a/Drivers/Generators/ExecDeclareGen.cs +++ b/Drivers/Generators/ExecDeclareGen.cs @@ -59,9 +59,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var transactionProperty = Variable.Transaction.AsPropertyName(); var dapperArgs = query.Params.Any() ? $", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} await this.{{transactionProperty}}.Connection.ExecuteAsync( {{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}}); @@ -94,9 +92,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) var commandParameters = CommonGen.AddParametersToCommand(query); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { {{commandVar}}.CommandText = 
{{sqlVar}}; diff --git a/Drivers/Generators/ExecLastIdDeclareGen.cs b/Drivers/Generators/ExecLastIdDeclareGen.cs index ab086d4b..48c9a231 100644 --- a/Drivers/Generators/ExecLastIdDeclareGen.cs +++ b/Drivers/Generators/ExecLastIdDeclareGen.cs @@ -58,8 +58,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var transactionProperty = Variable.Transaction.AsPropertyName(); var dapperArgs = query.Params.Any() ? $", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); + {{dbDriver.TransactionConnectionNullExcetionThrow}} return await this.{{transactionProperty}}.Connection.QuerySingleAsync<{{dbDriver.GetIdColumnType(query)}}>({{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}}); """; } @@ -91,9 +90,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) var returnLastId = ((IExecLastId)dbDriver).GetLastIdStatement(query).JoinByNewLine(); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { {{commandVar}}.CommandText = {{sqlVar}}; diff --git a/Drivers/Generators/ExecRowsDeclareGen.cs b/Drivers/Generators/ExecRowsDeclareGen.cs index 21ab264e..7acdc8c4 100644 --- a/Drivers/Generators/ExecRowsDeclareGen.cs +++ b/Drivers/Generators/ExecRowsDeclareGen.cs @@ -58,9 +58,7 @@ private string GetDapperWithTxBody(string sqlVar, Query query) var transactionProperty = Variable.Transaction.AsPropertyName(); var dapperArgs = query.Params.Any() ? 
$", {Variable.QueryParams.AsVarName()}" : string.Empty; return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} return await this.{{transactionProperty}}.Connection.ExecuteAsync( {{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}}); @@ -92,9 +90,7 @@ private string GetDriverWithTxBody(string sqlVar, Query query) var commandParameters = CommonGen.AddParametersToCommand(query); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { {{commandVar}}.CommandText = {{sqlVar}}; diff --git a/Drivers/Generators/ManyDeclareGen.cs b/Drivers/Generators/ManyDeclareGen.cs index 73945fe5..73a13547 100644 --- a/Drivers/Generators/ManyDeclareGen.cs +++ b/Drivers/Generators/ManyDeclareGen.cs @@ -68,9 +68,7 @@ private string GetDapperWithTxBody(string sqlVar, string returnInterface, Query var returnType = dbDriver.AddNullableSuffixIfNeeded(returnInterface, true); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} return (await this.{{transactionProperty}}.Connection.QueryAsync<{{returnType}}>( {{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}})).AsList(); @@ -124,9 +122,7 @@ private string GetDriverWithTxBody(string sqlVar, 
string returnInterface, Query """; return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - + {{dbDriver.TransactionConnectionNullExcetionThrow}} using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { {{commandVar}}.CommandText = {{sqlVar}}; diff --git a/Drivers/Generators/OneDeclareGen.cs b/Drivers/Generators/OneDeclareGen.cs index 0e43dd2f..c35d4d52 100644 --- a/Drivers/Generators/OneDeclareGen.cs +++ b/Drivers/Generators/OneDeclareGen.cs @@ -68,11 +68,7 @@ private string GetDapperWithTxBody(string sqlVar, string returnInterface, Query var returnType = dbDriver.AddNullableSuffixIfNeeded(returnInterface, false); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + {{dbDriver.TransactionConnectionNullExcetionThrow}} return await this.{{transactionProperty}}.Connection.QueryFirstOrDefaultAsync<{{returnType}}>( {{sqlVar}}{{dapperArgs}}, transaction: this.{{transactionProperty}}); @@ -118,11 +114,7 @@ private string GetDriverWithTxBody(string sqlVar, string returnInterface, Query var returnDataclass = CommonGen.InstantiateDataclass(query.Columns.ToArray(), returnInterface, query); return $$""" - if (this.{{transactionProperty}}?.Connection == null || this.{{transactionProperty}}?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + {{dbDriver.TransactionConnectionNullExcetionThrow}} using (var {{commandVar}} = this.{{transactionProperty}}.Connection.CreateCommand()) { {{commandVar}}.CommandText = {{sqlVar}}; diff --git 
a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index 1fc85a4b..7bd88a5a 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -69,10 +69,7 @@ public class GetAuthorArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -179,10 +176,7 @@ public class GetAuthorByIdArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } @@ -548,10 +542,7 @@ public class GetFirstExtendedBioByTypeArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); } @@ -719,10 +710,7 @@ public class GetMysqlNumericTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new 
System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); } @@ -758,10 +746,7 @@ public class GetMysqlNumericTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); } @@ -915,10 +900,7 @@ public class GetMysqlStringTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); } @@ -951,10 +933,7 @@ public class GetMysqlStringTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); } @@ -1078,10 +1057,7 @@ public class GetMysqlDatetimeTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } @@ -1107,10 +1083,7 @@ public class GetMysqlDatetimeTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); } @@ -1243,10 +1216,7 @@ public class GetMysqlBinaryTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); } @@ -1274,10 +1244,7 @@ public class GetMysqlBinaryTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); } @@ -1315,10 +1282,7 @@ public class GetMysqlFunctionsRow } if (this.Transaction?.Connection 
== null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index fc792791..601ceed2 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -194,10 +194,7 @@ public async Task GetMysqlNumericTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); } @@ -233,10 +230,7 @@ public async Task GetMysqlNumericTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); } @@ -389,10 +383,7 @@ public async Task GetMysqlStringTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is 
provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); } @@ -425,10 +416,7 @@ public async Task GetMysqlStringTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); } @@ -552,10 +540,7 @@ public async Task GetMysqlDatetimeTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } @@ -581,10 +566,7 @@ public async Task GetMysqlDatetimeTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); } @@ -716,10 +698,7 @@ public async Task GetMysqlBinaryTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { 
- throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); } @@ -747,10 +726,7 @@ public async Task GetMysqlBinaryTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); } @@ -788,10 +764,7 @@ public async Task GetMysqlFunctions() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } @@ -820,10 +793,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -930,10 +900,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } @@ -1299,10 +1266,7 @@ public async Task GetFirstExtendedBioByType(GetFir } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); } diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index aeb9959e..b7e7e85d 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -73,10 +73,7 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorSql; @@ -196,7 +193,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection 
is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -240,10 +237,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -507,7 +501,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -705,10 +699,7 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetFirstExtendedBioByTypeSql; @@ -911,10 +902,7 @@ public async Task InsertMysqlNumericTypesBatch(List GetMysqlNumericTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new 
System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlNumericTypesSql; @@ -343,10 +340,7 @@ public async Task GetMysqlNumericTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlNumericTypesCntSql; @@ -590,10 +584,7 @@ public async Task GetMysqlStringTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlStringTypesSql; @@ -679,10 +670,7 @@ public async Task GetMysqlStringTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlStringTypesCntSql; @@ -880,10 +868,7 @@ public async Task GetMysqlDatetimeTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlDatetimeTypesSql; @@ -948,10 +933,7 @@ public async Task GetMysqlDatetimeTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlDatetimeTypesCntSql; @@ -1154,10 +1136,7 @@ public async Task GetMysqlBinaryTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlBinaryTypesSql; @@ -1228,10 +1207,7 @@ public async Task GetMysqlBinaryTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlBinaryTypesCntSql; @@ -1320,10 +1296,7 @@ public async Task GetMysqlFunctions() } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetMysqlFunctionsSql; @@ -1385,10 +1358,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorSql; @@ -1526,7 +1496,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1578,10 +1548,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -1880,7 +1847,7 @@ public async Task CreateBook(CreateBookArgs args) } 
if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; @@ -2110,10 +2077,7 @@ public async Task GetFirstExtendedBioByType(GetFir } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetFirstExtendedBioByTypeSql; diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 6fd7fe57..1f3b7606 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -222,10 +222,7 @@ public class GetPostgresTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } @@ -269,10 +266,7 @@ public class GetPostgresTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction 
is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } @@ -295,10 +289,7 @@ public class GetPostgresFunctionsRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } @@ -370,10 +361,7 @@ public class GetPostgresUnstructuredTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } @@ -448,10 +436,7 @@ public class GetPostgresArrayTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } @@ -498,10 +483,7 @@ public class GetPostgresArrayTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } 
- + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); } @@ -613,10 +595,7 @@ public class GetPostgresGeoTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } @@ -660,10 +639,7 @@ public class GetAuthorArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -727,10 +703,7 @@ public class CreateAuthorArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -785,10 +758,7 @@ public class GetAuthorByIdArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its 
connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 2913142c..359db9b4 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -223,10 +223,7 @@ public async Task GetPostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } @@ -270,10 +267,7 @@ public async Task GetPostgresTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } @@ -296,10 +290,7 @@ public async Task GetPostgresFunctions() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, 
transaction: this.Transaction); } @@ -371,10 +362,7 @@ public async Task GetPostgresUnstructuredTypes( } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } @@ -449,10 +437,7 @@ public async Task GetPostgresArrayTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } @@ -499,10 +484,7 @@ public async Task GetPostgresArrayTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); } @@ -614,10 +596,7 @@ public async Task GetPostgresGeoTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await 
this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } @@ -661,10 +640,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -728,10 +704,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } @@ -786,10 +759,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index 06d173fb..d0a73807 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -212,10 +212,7 @@ public async Task InsertPostgresTypesBatch(List ar } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresTypesSql; @@ -309,10 +306,7 @@ public async Task InsertPostgresTypesBatch(List ar } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresTypesCntSql; @@ -384,10 +378,7 @@ public async Task InsertPostgresTypesBatch(List ar } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresFunctionsSql; @@ -511,10 +502,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = 
GetPostgresUnstructuredTypesSql; @@ -644,10 +632,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresArrayTypesSql; @@ -723,10 +708,7 @@ public async Task InsertPostgresArrayTypesBatch(List> ListAuthors(ListAuthorsArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -1110,7 +1083,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1153,10 +1126,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new 
InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -1403,7 +1373,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 4861b486..c8a67469 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -292,10 +292,7 @@ public async Task GetPostgresTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresTypesSql; @@ -415,10 +412,7 @@ public async Task GetPostgresTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresTypesCntSql; @@ -495,10 +489,7 @@ public async Task GetPostgresFunctions() } if (this.Transaction?.Connection == 
null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresFunctionsSql; @@ -638,10 +629,7 @@ public async Task GetPostgresUnstructuredTypes( } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresUnstructuredTypesSql; @@ -789,10 +777,7 @@ public async Task GetPostgresArrayTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresArrayTypesSql; @@ -875,10 +860,7 @@ public async Task GetPostgresArrayTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresArrayTypesCntSql; @@ -1054,10 +1036,7 @@ public async Task GetPostgresGeoTypes() } 
if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetPostgresGeoTypesSql; @@ -1148,10 +1127,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorSql; @@ -1268,10 +1244,7 @@ public async Task CreateAuthor(CreateAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorSql; @@ -1323,7 +1296,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -1374,10 +1347,7 @@ public 
async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -1662,7 +1632,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; diff --git a/examples/SqliteDapperExample/QuerySql.cs b/examples/SqliteDapperExample/QuerySql.cs index 5f98a5fb..8e9638c6 100644 --- a/examples/SqliteDapperExample/QuerySql.cs +++ b/examples/SqliteDapperExample/QuerySql.cs @@ -111,10 +111,7 @@ public class GetSqliteTypesRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } @@ -139,10 +136,7 @@ public class GetSqliteTypesCntRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new 
InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } @@ -165,10 +159,7 @@ public class GetSqliteFunctionsRow } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); } @@ -212,10 +203,7 @@ public class GetAuthorArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -326,10 +314,7 @@ public class GetAuthorByIdArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } @@ -360,10 +345,7 @@ public class GetAuthorByIdWithMultipleNamedParamArgs } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its 
connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdWithMultipleNamedParamSql, queryParams, transaction: this.Transaction); } diff --git a/examples/SqliteDapperLegacyExample/QuerySql.cs b/examples/SqliteDapperLegacyExample/QuerySql.cs index 10425dce..5494b301 100644 --- a/examples/SqliteDapperLegacyExample/QuerySql.cs +++ b/examples/SqliteDapperLegacyExample/QuerySql.cs @@ -112,10 +112,7 @@ public async Task GetSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); } @@ -140,10 +137,7 @@ public async Task GetSqliteTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); } @@ -166,10 +160,7 @@ public async Task GetSqliteFunctions() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await 
this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); } @@ -213,10 +204,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } @@ -327,10 +315,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } @@ -361,10 +346,7 @@ public async Task GetAuthorByIdWithMulti } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdWithMultipleNamedParamSql, queryParams, transaction: this.Transaction); } diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index a9f84af4..c2b96803 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -123,10 +123,7 @@ public async Task InsertSqliteTypesBatch(List args) } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteTypesSql; @@ -181,10 +178,7 @@ public async Task InsertSqliteTypesBatch(List args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteTypesCntSql; @@ -238,10 +232,7 @@ public async Task InsertSqliteTypesBatch(List args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteFunctionsSql; @@ -322,10 +313,7 @@ public async Task DeleteAllSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorSql; @@ -446,7 +434,7 @@ public async Task 
CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -490,10 +478,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -549,10 +534,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdWithMultipleNamedParamSql; @@ -791,7 +773,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = 
this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; diff --git a/examples/SqliteLegacyExample/QuerySql.cs b/examples/SqliteLegacyExample/QuerySql.cs index 2cf9f7f3..13b7c294 100644 --- a/examples/SqliteLegacyExample/QuerySql.cs +++ b/examples/SqliteLegacyExample/QuerySql.cs @@ -141,10 +141,7 @@ public async Task GetSqliteTypes() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteTypesSql; @@ -206,10 +203,7 @@ public async Task GetSqliteTypesCnt() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteTypesCntSql; @@ -268,10 +262,7 @@ public async Task GetSqliteFunctions() } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetSqliteFunctionsSql; @@ -360,10 +351,7 @@ public async Task GetAuthor(GetAuthorArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new 
System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorSql; @@ -505,7 +493,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateAuthorReturnIdSql; @@ -557,10 +545,7 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdSql; @@ -625,10 +610,7 @@ public async Task GetAuthorByIdWithMulti } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - { - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); - } - + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = GetAuthorByIdWithMultipleNamedParamSql; @@ -905,7 +887,7 @@ public async Task CreateBook(CreateBookArgs args) } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) - throw new System.InvalidOperationException("Transaction is provided, but its connection is null."); + throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { command.CommandText = CreateBookSql; From a0b1a77009c27b70bead1cbd56bf492b903017ed Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 15 Aug 2025 12:57:55 +0200 Subject: [PATCH 15/33] fix: more refactoring --- Drivers/MySqlConnectorDriver.cs | 38 ++++---- Drivers/NpgsqlDriver.cs | 28 +++--- Drivers/SqliteDriver.cs | 96 ++++++++++----------- examples/NpgsqlDapperExample/Utils.cs | 26 +++--- examples/NpgsqlDapperLegacyExample/Utils.cs | 25 +++--- 5 files changed, 97 insertions(+), 116 deletions(-) diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 42d9725d..347e94e4 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -157,24 +157,6 @@ public partial class MySqlConnectorDriver( public override string TransactionClassName => "MySqlTransaction"; - private readonly Func _setTypeHandlerFunc = x => - $$""" - private class {{x}}TypeHandler : SqlMapper.TypeHandler> - { - public override HashSet<{{x}}> Parse(object value) - { - if (value is string s) - return s.To{{x}}Set(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet<{{x}}>"); - } - - public override void SetValue(IDbDataParameter parameter, HashSet<{{x}}> value) - { - parameter.Value = string.Join(",", value); - } - } - """; - public override ISet GetUsingDirectivesForQueries() { return base @@ -247,6 +229,24 @@ protected override ISet GetConfigureSqlMappings() private MemberDeclarationSyntax[] GetSetTypeHandlers() { + var setTypeHandlerFunc = (string x) => + $$""" + private class {{x}}TypeHandler : SqlMapper.TypeHandler> + { + public override HashSet<{{x}}> Parse(object value) + { + if (value is string s) + return s.To{{x}}Set(); + 
throw new DataException($"Cannot convert {value?.GetType()} to HashSet<{{x}}>"); + } + + public override void SetValue(IDbDataParameter parameter, HashSet<{{x}}> value) + { + parameter.Value = string.Join(",", value); + } + } + """; + return Queries .SelectMany(q => q.Columns) .Where(c => @@ -254,7 +254,7 @@ private MemberDeclarationSyntax[] GetSetTypeHandlers() var enumType = GetEnumType(c); return enumType is not null && IsSetDataType(c, enumType); }) - .Select(c => _setTypeHandlerFunc(c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema))) + .Select(c => setTypeHandlerFunc(c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema))) .Distinct() .Select(m => ParseMemberDeclaration(m)!) .ToArray(); diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index a707ac46..ba94d2ca 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -194,7 +194,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlPoint), new NpgsqlTypeHandler());" ), ["NpgsqlLine"] = new( new() @@ -204,7 +204,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlLine), new NpgsqlTypeHandler());" ), ["NpgsqlLSeg"] = new( new() @@ -214,7 +214,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlLSeg), new NpgsqlTypeHandler());" ), ["NpgsqlBox"] = new( new() @@ -224,7 +224,7 @@ public NpgsqlDriver( readerFn: ordinal => 
$"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlBox), new NpgsqlTypeHandler());" ), ["NpgsqlPath"] = new( new() @@ -234,7 +234,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlPath), new NpgsqlTypeHandler());" ), ["NpgsqlPolygon"] = new( new() @@ -244,7 +244,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlPolygon), new NpgsqlTypeHandler());" ), ["NpgsqlCircle"] = new( new() @@ -254,7 +254,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlCircle), new NpgsqlTypeHandler());" ), /* Network data types */ @@ -266,7 +266,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "NpgsqlTypes", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlCidr), new NpgsqlTypeHandler());" ), ["IPAddress"] = new( new() @@ -276,7 +276,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "System.Net", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(IPAddress), new 
NpgsqlTypeHandler());" ), ["PhysicalAddress"] = new( new() @@ -286,7 +286,7 @@ public NpgsqlDriver( readerFn: ordinal => $"reader.GetFieldValue({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", usingDirective: "System.Net.NetworkInformation", - sqlMapper: "RegisterNpgsqlTypeHandler();" + sqlMapper: "SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler());" ), /* Other data types */ @@ -405,13 +405,7 @@ public override void SetValue(IDbDataParameter parameter, T{{optionalDotnetCoreN parameter.Value = value; } } - """)!, - ParseMemberDeclaration($$""" - private static void RegisterNpgsqlTypeHandler(){{optionalDotnetCoreSuffix}} - { - SqlMapper.AddTypeHandler(typeof(T), new NpgsqlTypeHandler()); - } - """)!, + """)! ]; } diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index d8bbeacc..d2ae33ed 100644 --- a/Drivers/SqliteDriver.cs +++ b/Drivers/SqliteDriver.cs @@ -82,37 +82,40 @@ public override ISet GetUsingDirectivesForUtils() ); } + private const string ValuesRegex = """ + private static readonly Regex ValuesRegex = new Regex(@"VALUES\s*\((?[^)]*)\)", RegexOptions.IgnoreCase); + """; + + private const string TransformQueryForBatch = """ + public static string TransformQueryForSqliteBatch(string originalSql, int cntRecords) + { + var match = ValuesRegex.Match(originalSql); + if (!match.Success) + throw new ArgumentException("The query does not contain a valid VALUES clause."); + + var valuesParams = match.Groups["params"].Value + .Split(',') + .Select(p => p.Trim()) + .ToList(); + var batchRows = Enumerable.Range(0, cntRecords) + .Select(i => "(" + string.Join(", ", valuesParams.Select(p => $"{p}{i}")) + ")"); + + var batchValuesClause = "VALUES " + string.Join(",\n", batchRows); + return ValuesRegex.Replace(originalSql, batchValuesClause); + } + """; + public override MemberDeclarationSyntax[] GetMemberDeclarationsForUtils() { - var memberDeclarations = base + return base .GetMemberDeclarationsForUtils() - 
.AddRangeIf([ParseMemberDeclaration(TransformQueryForSliceArgsImpl)!], SliceQueryExists()); - - if (!CopyFromQueryExists()) - return memberDeclarations.ToArray(); - - return memberDeclarations - .Append(ParseMemberDeclaration(""" - private static readonly Regex ValuesRegex = new Regex(@"VALUES\s*\((?[^)]*)\)", RegexOptions.IgnoreCase); - """)!) - .Append(ParseMemberDeclaration(""" - public static string TransformQueryForSqliteBatch(string originalSql, int cntRecords) - { - var match = ValuesRegex.Match(originalSql); - if (!match.Success) - throw new ArgumentException("The query does not contain a valid VALUES clause."); - - var valuesParams = match.Groups["params"].Value - .Split(',') - .Select(p => p.Trim()) - .ToList(); - var batchRows = Enumerable.Range(0, cntRecords) - .Select(i => "(" + string.Join(", ", valuesParams.Select(p => $"{p}{i}")) + ")"); - - var batchValuesClause = "VALUES " + string.Join(",\n", batchRows); - return ValuesRegex.Replace(originalSql, batchValuesClause); - } - """)!) + .AddRangeIf([ + ParseMemberDeclaration(TransformQueryForSliceArgsImpl)! + ], SliceQueryExists()) + .AddRangeIf([ + ParseMemberDeclaration(ValuesRegex)!, + ParseMemberDeclaration(TransformQueryForBatch)! + ], CopyFromQueryExists()) .ToArray(); } @@ -131,33 +134,28 @@ public override string CreateSqlCommand(string sqlTextConstant) public override string TransformQueryText(Query query) { - // Regex to detect numbered parameters like ?1, ?2 - var areArgumentsNumbered = new Regex(@"\?\d+\b").IsMatch(query.Text); var queryText = query.Text; + var areArgumentsNumbered = NumberedArgumentsRegex().IsMatch(queryText); - if (areArgumentsNumbered) - { - // For numbered parameters, we replace all occurrences of each parameter number. 
- foreach (var p in query.Params) - { - var column = GetColumnFromParam(p, query); - var regex = new Regex($@"\?{p.Number}\b"); - queryText = regex.Replace(queryText, $"@{column.Name}"); - } - } - else + foreach (var p in query.Params) { - // For positional '?' parameters, we must replace them one by one in order. - var regex = new Regex(@"\?"); - foreach (var p in query.Params) - { - var column = GetColumnFromParam(p, query); - queryText = regex.Replace(queryText, $"@{column.Name}", 1); - } + var column = GetColumnFromParam(p, query); + queryText = areArgumentsNumbered + // For numbered parameters, we replace all occurrences of each parameter number. + ? new Regex($@"\?{p.Number}\b").Replace(queryText, $"@{column.Name}") + // For positional '?' parameters, we must replace them one by one in order. + : QueryParamRegex().Replace(queryText, $"@{column.Name}", 1); } return queryText; } + // Regex to detect numbered parameters like ?1, ?2 + [GeneratedRegex(@"\?\d+\b")] + private static partial Regex NumberedArgumentsRegex(); + + [GeneratedRegex(@"\?")] + private static partial Regex QueryParamRegex(); + public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) { @@ -222,7 +220,7 @@ string AddParametersToCommand() var nullParamCast = p.Column.NotNull ? string.Empty : " ?? 
(object)DBNull.Value"; var addParamToCommand = $$""" {{commandVar}}.Parameters.AddWithValue($"@{{p.Column.Name}}{i}", {{argsVar}}[i].{{param}}{{nullParamCast}}); - """; + """; return addParamToCommand; }).JoinByNewLine(); diff --git a/examples/NpgsqlDapperExample/Utils.cs b/examples/NpgsqlDapperExample/Utils.cs index 0c800aac..03167caa 100644 --- a/examples/NpgsqlDapperExample/Utils.cs +++ b/examples/NpgsqlDapperExample/Utils.cs @@ -50,16 +50,16 @@ public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); SqlMapper.AddTypeHandler(typeof(XmlDocument), new XmlDocumentTypeHandler()); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); + SqlMapper.AddTypeHandler(typeof(NpgsqlPoint), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlLine), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlLSeg), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlBox), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlPath), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlPolygon), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlCircle), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlCidr), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(IPAddress), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler()); } private class NpgsqlTypeHandler : SqlMapper.TypeHandler where T : notnull @@ -74,10 +74,4 @@ public override void SetValue(IDbDataParameter parameter, T? 
value) parameter.Value = value; } } - - private static void RegisterNpgsqlTypeHandler() - where T : notnull - { - SqlMapper.AddTypeHandler(typeof(T), new NpgsqlTypeHandler()); - } } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/Utils.cs b/examples/NpgsqlDapperLegacyExample/Utils.cs index 585d0584..603c6891 100644 --- a/examples/NpgsqlDapperLegacyExample/Utils.cs +++ b/examples/NpgsqlDapperLegacyExample/Utils.cs @@ -51,16 +51,16 @@ public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); SqlMapper.AddTypeHandler(typeof(XmlDocument), new XmlDocumentTypeHandler()); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); - RegisterNpgsqlTypeHandler(); + SqlMapper.AddTypeHandler(typeof(NpgsqlPoint), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlLine), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlLSeg), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlBox), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlPath), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlPolygon), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlCircle), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlCidr), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(IPAddress), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler()); } private class NpgsqlTypeHandler : SqlMapper.TypeHandler @@ -75,10 +75,5 @@ public override void SetValue(IDbDataParameter parameter, T value) parameter.Value = value; } } - - private static void RegisterNpgsqlTypeHandler() - { - SqlMapper.AddTypeHandler(typeof(T), new 
NpgsqlTypeHandler()); - } } } \ No newline at end of file From b1b9b91e4b3f59dcda04cb4eb70ca69cc85a02d6 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 15 Aug 2025 13:11:59 +0200 Subject: [PATCH 16/33] fix: simplify Drivers method placement --- Drivers/DbDriver.cs | 33 ++++++++++++++++++++++++++ Drivers/MySqlConnectorDriver.cs | 41 +++------------------------------ Drivers/NpgsqlDriver.cs | 34 +-------------------------- Drivers/SqliteDriver.cs | 36 +---------------------------- 4 files changed, 38 insertions(+), 106 deletions(-) diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 4f58b6dd..58b9b189 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -1,5 +1,6 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; +using SqlcGenCsharp.Drivers.Generators; using System; using System.Collections.Generic; using System.Linq; @@ -118,6 +119,38 @@ protected DbDriver( } } + public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); 
+ } + + public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) + { + return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + public virtual ISet GetUsingDirectivesForQueries() { return new HashSet diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 347e94e4..2c31aef4 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -1,12 +1,9 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; -using SqlcGenCsharp.Drivers.Generators; -using System; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; -using OneDeclareGen = SqlcGenCsharp.Drivers.Generators.OneDeclareGen; namespace SqlcGenCsharp.Drivers; @@ -14,7 +11,7 @@ public partial class MySqlConnectorDriver( Options options, string defaultSchema, Dictionary> tables, - Dictionary> enums, + Dictionary> enums, IList queries) : DbDriver(options, defaultSchema, tables, enums, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { @@ -398,33 +395,6 @@ public override string TransformQueryText(Query query) [GeneratedRegex(@"\?")] private static partial Regex QueryParamRegex(); - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) - { - return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public 
MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - /* :execlastid methods */ - public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - public override string[] GetLastIdStatement(Query query) { return @@ -440,11 +410,6 @@ public override string[] GetLastIdStatement(Query query) public const string ByteCsvConverter = "ByteCsvConverter"; public const string ByteArrayCsvConverter = "ByteArrayCsvConverter"; - public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) - { - return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - public string GetCopyFromImpl(Query query, string queryTextConstant) { const string tempCsvFilename = "input.csv"; @@ -570,13 +535,13 @@ private ISet GetSetConverters(Query query) } /* Enum methods */ - public override string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) + public override string EnumToCsharpTypeName(Column column, Enum enumType) { var enumName = column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); return IsSetDataType(column, enumType) ? 
$"HashSet<{enumName}>" : enumName; } - private static bool IsSetDataType(Column column, Plugin.Enum enumType) + private static bool IsSetDataType(Column column, Enum enumType) { return column.Length > enumType.Vals.Select(v => v.Length).Sum(); } diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index ba94d2ca..31894d8f 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -14,7 +14,7 @@ public NpgsqlDriver( Options options, string defaultSchema, Dictionary> tables, - Dictionary> enums, + Dictionary> enums, IList queries) : base(options, defaultSchema, tables, enums, queries) { @@ -454,38 +454,6 @@ string GetCopyCommand() } } - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) - { - return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - public string 
GetCopyFromImpl(Query query, string queryTextConstant) { var (establishConnection, connectionOpen) = EstablishConnection(query); diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index d2ae33ed..6f0fd171 100644 --- a/Drivers/SqliteDriver.cs +++ b/Drivers/SqliteDriver.cs @@ -1,7 +1,5 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; -using SqlcGenCsharp.Drivers.Generators; -using System; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; @@ -13,7 +11,7 @@ public partial class SqliteDriver( Options options, string defaultSchema, Dictionary> tables, - Dictionary> enums, + Dictionary> enums, IList queries) : DbDriver(options, defaultSchema, tables, enums, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { @@ -156,38 +154,6 @@ public override string TransformQueryText(Query query) [GeneratedRegex(@"\?")] private static partial Regex QueryParamRegex(); - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) - { - return new 
ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) - { - return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - public string GetCopyFromImpl(Query query, string queryTextConstant) { var sqlTextVar = Variable.TransformedSql.AsVarName(); From e5cb381c875dde8410c13b4939be720d89da6085 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 15 Aug 2025 14:13:26 +0200 Subject: [PATCH 17/33] fix: missing assertions in postgres guid end2end tests --- Drivers/MySqlConnectorDriver.cs | 46 +++++++------- Drivers/NpgsqlDriver.cs | 3 +- .../Templates/PostgresTests.cs | 2 +- .../NpgsqlDapperTester.generated.cs | 2 +- .../EndToEndTests/NpgsqlTester.generated.cs | 2 +- .../NpgsqlDapperTester.generated.cs | 2 +- .../NpgsqlTester.generated.cs | 2 +- examples/NpgsqlDapperExample/QuerySql.cs | 6 +- examples/NpgsqlExample/QuerySql.cs | 60 +++++++++---------- 9 files changed, 63 insertions(+), 62 deletions(-) diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 2c31aef4..75b4f143 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -438,8 +438,7 @@ public string GetCopyFromImpl(Query query, string queryTextConstant) var {{optionsVar}} = new TypeConverterOptions { Formats = new[] { supportedDateTimeFormat } }; {{csvWriterVar}}.Context.TypeConverterOptionsCache.AddOptions({{optionsVar}}); {{csvWriterVar}}.Context.TypeConverterOptionsCache.AddOptions({{optionsVar}}); - {{GetBoolAndByteConverters(query).JoinByNewLine()}} - {{GetCsvNullConverters(query).JoinByNewLine()}} + {{GetCsvConverters(query).JoinByNewLine()}} await {{csvWriterVar}}.WriteRecordsAsync({{Variable.Args.AsVarName()}}); } @@ -471,26 +470,7 @@ public string GetCopyFromImpl(Query query, string queryTextConstant) "byte[]" }; - private ISet GetCsvNullConverters(Query query) - { - var 
nullConverterFn = Variable.NullConverterFn.AsVarName(); - var converters = new HashSet(); - foreach (var p in query.Params) - { - var csharpType = GetCsharpTypeWithoutNullableSuffix(p.Column, query); - if ( - !BoolAndByteTypes.Contains(csharpType) && - !IsSetDataType(p.Column) && - TypeExistsInQuery(csharpType, query)) - { - var nullableCsharpType = AddNullableSuffixIfNeeded(csharpType, false); - converters.Add($"{Variable.CsvWriter.AsVarName()}.Context.TypeConverterCache.AddConverter<{nullableCsharpType}>({nullConverterFn});"); - } - } - return converters; - } - - private ISet GetBoolAndByteConverters(Query query) + private ISet GetCsvConverters(Query query) { var csvWriterVar = Variable.CsvWriter.AsVarName(); return new HashSet() @@ -515,7 +495,27 @@ private ISet GetBoolAndByteConverters(Query query) ], TypeExistsInQuery("byte[]", query) ) - .AddRangeExcludeNulls(GetSetConverters(query)); + .AddRangeExcludeNulls(GetSetConverters(query)) + .AddRangeExcludeNulls(GetCsvNullConverters(query)); + } + + private ISet GetCsvNullConverters(Query query) + { + var nullConverterFn = Variable.NullConverterFn.AsVarName(); + var converters = new HashSet(); + foreach (var p in query.Params) + { + var csharpType = GetCsharpTypeWithoutNullableSuffix(p.Column, query); + if ( + !BoolAndByteTypes.Contains(csharpType) && + !IsSetDataType(p.Column) && + TypeExistsInQuery(csharpType, query)) + { + var nullableCsharpType = AddNullableSuffixIfNeeded(csharpType, false); + converters.Add($"{Variable.CsvWriter.AsVarName()}.Context.TypeConverterCache.AddConverter<{nullableCsharpType}>({nullConverterFn});"); + } + } + return converters; } private ISet GetSetConverters(Query query) diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index 31894d8f..0507dabc 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -415,6 +415,7 @@ public override ConnectionGenCommands EstablishConnection(Query query) var connectionVar = Variable.Connection.AsVarName(); var 
embedTableExists = query.Columns.Any(c => c.EmbedTable is not null); var useOpenConnection = query.Cmd == ":copyfrom" || (Options.UseDapper && !embedTableExists); + var optionalNotNullVerify = Options.DotnetFramework.IsDotnetCore() ? "!" : string.Empty; return useOpenConnection ? new ConnectionGenCommands( @@ -422,7 +423,7 @@ public override ConnectionGenCommands EstablishConnection(Query query) string.Empty ) : new ConnectionGenCommands( - $"var {connectionVar} = NpgsqlDataSource.Create({connectionStringVar})", + $"var {connectionVar} = NpgsqlDataSource.Create({connectionStringVar}{optionalNotNullVerify})", string.Empty ); } diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 9596247f..1970cad8 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -959,7 +959,7 @@ public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) CUuid = cUuid }; var actual = await QuerySql.GetPostgresTypesCnt(); - Assert.That(actual{{Consts.UnknownRecordValuePlaceholder}}.Cnt, Is.EqualTo(expected.Cnt)); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) { diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index e18afe45..50aa80d1 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -681,7 +681,7 @@ public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? 
cUuid) CUuid = cUuid }; var actual = await QuerySql.GetPostgresTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); + AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index c18ef139..ff46a314 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -681,7 +681,7 @@ public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) CUuid = cUuid }; var actual = await QuerySql.GetPostgresTypesCnt(); - Assert.That(actual.Value.Cnt, Is.EqualTo(expected.Cnt)); + AssertSingularEquals(expected, actual.Value); void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index a179526e..4babb377 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -681,7 +681,7 @@ public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? 
cUuid) CUuid = cUuid }; var actual = await QuerySql.GetPostgresTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); + AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 7377d05c..0ee9a507 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -681,7 +681,7 @@ public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) CUuid = cUuid }; var actual = await QuerySql.GetPostgresTypesCnt(); - Assert.That(actual.Cnt, Is.EqualTo(expected.Cnt)); + AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 1f3b7606..5631b310 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -943,7 +943,7 @@ public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { @@ -984,7 +984,7 @@ public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { @@ -1031,7 +1031,7 @@ public async Task> GetAuthorsByBookName(GetAuthors { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using 
(var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index d0a73807..ddd466b8 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -46,7 +46,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { @@ -167,7 +167,7 @@ public async Task InsertPostgresTypesBatch(List ar { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetPostgresTypesSql)) { @@ -262,7 +262,7 @@ public async Task InsertPostgresTypesBatch(List ar { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) { @@ -355,7 +355,7 @@ public async Task InsertPostgresTypesBatch(List ar { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) { @@ -405,7 +405,7 @@ public async Task TruncatePostgresTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { @@ -432,7 +432,7 @@ public async Task 
InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { @@ -471,7 +471,7 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { @@ -537,7 +537,7 @@ public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { @@ -564,7 +564,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) { @@ -605,7 +605,7 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) { @@ -686,7 +686,7 @@ public async Task InsertPostgresArrayTypesBatch(List> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { 
using (var command = connection.CreateCommand(ListAuthorsSql)) { @@ -1012,7 +1012,7 @@ public async Task> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(CreateAuthorSql)) { @@ -1070,7 +1070,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) { @@ -1102,7 +1102,7 @@ public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorByIdSql)) { @@ -1156,7 +1156,7 @@ public async Task> GetAuthorByNamePattern(GetAut { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) { @@ -1195,7 +1195,7 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(DeleteAuthorSql)) { @@ -1223,7 +1223,7 @@ public async Task TruncateAuthors() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(TruncateAuthorsSql)) { @@ 
-1250,7 +1250,7 @@ public async Task UpdateAuthors(UpdateAuthorsArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(UpdateAuthorsSql)) { @@ -1278,7 +1278,7 @@ public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) { @@ -1318,7 +1318,7 @@ public async Task> GetAuthorsByIdsAndNames(GetA { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) { @@ -1360,7 +1360,7 @@ public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(CreateBookSql)) { @@ -1391,7 +1391,7 @@ public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { @@ -1428,7 +1428,7 @@ public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { @@ -1466,7 +1466,7 @@ public async Task> GetAuthorsByBookName(GetAuthors { if (this.Transaction == null) { 
- using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { From 5a6e77748edcdd3b8cae01c540dd5797e809e806 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 15 Aug 2025 17:42:18 +0200 Subject: [PATCH 18/33] fix: revert incorrect changes to Drivers --- Drivers/DbDriver.cs | 32 ------------------------------ Drivers/MySqlConnectorDriver.cs | 35 +++++++++++++++++++++++++++++++++ Drivers/NpgsqlDriver.cs | 33 +++++++++++++++++++++++++++++++ Drivers/SqliteDriver.cs | 33 +++++++++++++++++++++++++++++++ 4 files changed, 101 insertions(+), 32 deletions(-) diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 58b9b189..0b7886f2 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -119,38 +119,6 @@ protected DbDriver( } } - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, - string returnInterface, Query query) - { - return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); - } - - public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) - { - return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - - 
public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) - { - return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); - } - public virtual ISet GetUsingDirectivesForQueries() { return new HashSet diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 75b4f143..c4a0d0cb 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -1,5 +1,6 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; +using SqlcGenCsharp.Drivers.Generators; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; @@ -154,6 +155,40 @@ public partial class MySqlConnectorDriver( public override string TransactionClassName => "MySqlTransaction"; + + public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, 
Query query) + { + return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public override ISet GetUsingDirectivesForQueries() { return base diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index 0507dabc..9a9d7130 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -347,6 +347,39 @@ public override void SetValue(IDbDataParameter parameter, XmlDocument value) } """; + + public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) + { + return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + public override ISet GetUsingDirectivesForQueries() { return base.GetUsingDirectivesForQueries().AddRangeExcludeNulls( diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index 6f0fd171..833326a3 100644 --- a/Drivers/SqliteDriver.cs +++ 
b/Drivers/SqliteDriver.cs @@ -1,5 +1,6 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; +using SqlcGenCsharp.Drivers.Generators; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; @@ -103,6 +104,38 @@ public static string TransformQueryForSqliteBatch(string originalSql, int cntRec } """; + public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new OneDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ManyDeclare(string queryTextConstant, string argInterface, + string returnInterface, Query query) + { + return new ManyDeclareGen(this).Generate(queryTextConstant, argInterface, returnInterface, query); + } + + public MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecRowsDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax ExecLastIdDeclare(string queryTextConstant, string argInterface, Query query) + { + return new ExecLastIdDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + + public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string argInterface, Query query) + { + return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); + } + public override MemberDeclarationSyntax[] GetMemberDeclarationsForUtils() { return base From 7761b3121297079acd0e4b836df162fadc296c80 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 16 Aug 2025 17:01:39 +0200 Subject: [PATCH 19/33] fix: refactor to support postgres enums --- CodeGenerator/CodeGenerator.cs | 99 ++----- 
CodeGenerator/Generators/EnumsGen.cs | 13 + Drivers/DbDriver.cs | 69 +++-- Drivers/Generators/CommonGen.cs | 26 +- Drivers/MySqlConnectorDriver.cs | 80 +++++- Drivers/NpgsqlDriver.cs | 60 ++++- Drivers/QueryAnnotations.cs | 2 +- Drivers/SqliteDriver.cs | 17 +- Extensions/StringExtensions.cs | 1 + RequestRunner/App.cs | 3 + docs/04_Postgres.md | 2 +- end2end/EndToEndScaffold/Config.cs | 3 + .../EndToEndScaffold/Templates/MySqlTests.cs | 94 +++---- .../Templates/PostgresTests.cs | 27 ++ .../NpgsqlDapperTester.generated.cs | 18 ++ .../EndToEndTests/NpgsqlTester.generated.cs | 18 ++ .../NpgsqlDapperTester.generated.cs | 18 ++ .../NpgsqlTester.generated.cs | 18 ++ .../MySqlConnectorDapperExample/Models.cs | 48 ++++ .../Models.cs | 48 ++++ examples/MySqlConnectorExample/Models.cs | 48 ++++ .../MySqlConnectorLegacyExample/Models.cs | 48 ++++ examples/NpgsqlDapperExample/Models.cs | 89 ++++++- examples/NpgsqlDapperExample/QuerySql.cs | 76 +++++- examples/NpgsqlDapperExample/request.json | 243 +++++++++++++++++- examples/NpgsqlDapperExample/request.message | 58 +++-- examples/NpgsqlDapperLegacyExample/Models.cs | 87 +++++++ .../NpgsqlDapperLegacyExample/QuerySql.cs | 76 +++++- .../NpgsqlDapperLegacyExample/request.json | 243 +++++++++++++++++- .../NpgsqlDapperLegacyExample/request.message | 58 +++-- examples/NpgsqlExample/Models.cs | 85 +++++- examples/NpgsqlExample/QuerySql.cs | 141 +++++++++- examples/NpgsqlExample/request.json | 243 +++++++++++++++++- examples/NpgsqlExample/request.message | 58 +++-- examples/NpgsqlLegacyExample/Models.cs | 87 +++++++ examples/NpgsqlLegacyExample/QuerySql.cs | 152 ++++++++++- examples/NpgsqlLegacyExample/request.json | 243 +++++++++++++++++- examples/NpgsqlLegacyExample/request.message | 58 +++-- examples/config/postgresql/authors/query.sql | 9 + examples/config/postgresql/authors/schema.sql | 11 + examples/config/postgresql/types/query.sql | 3 + examples/config/postgresql/types/schema.sql | 5 +- 42 files changed, 2460 insertions(+), 325 
deletions(-) diff --git a/CodeGenerator/CodeGenerator.cs b/CodeGenerator/CodeGenerator.cs index 6c74c030..7cefc15d 100644 --- a/CodeGenerator/CodeGenerator.cs +++ b/CodeGenerator/CodeGenerator.cs @@ -15,15 +15,8 @@ namespace SqlcGenCsharp; public class CodeGenerator { - private readonly HashSet _excludedSchemas = - [ - "pg_catalog", - "information_schema" - ]; - private Options? _options; - private Dictionary>? _tables; - private Dictionary>? _enums; + private Catalog? _catalog; private List? _queries; private DbDriver? _dbDriver; private QueriesGen? _queriesGen; @@ -37,16 +30,10 @@ private Options Options set => _options = value; } - private Dictionary> Tables + private Catalog Catalog { - get => _tables!; - set => _tables = value; - } - - private Dictionary> Enums - { - get => _enums!; - set => _enums = value; + get => _catalog!; + set => _catalog = value; } private List Queries @@ -89,79 +76,29 @@ private void InitGenerators(GenerateRequest generateRequest) { var outputDirectory = generateRequest.Settings.Codegen.Out; var projectName = new DirectoryInfo(outputDirectory).Name; - Options = new Options(generateRequest); + Options = new(generateRequest); if (Options.DebugRequest) return; Queries = generateRequest.Queries.ToList(); - Tables = ConstructTablesLookup(generateRequest.Catalog); - Enums = ConstructEnumsLookup(generateRequest.Catalog); - var namespaceName = Options.NamespaceName == string.Empty ? 
projectName : Options.NamespaceName; - DbDriver = InstantiateDriver(generateRequest.Catalog.DefaultSchema); + Catalog = generateRequest.Catalog; + DbDriver = InstantiateDriver(); // initialize file generators - CsprojGen = new CsprojGen(outputDirectory, projectName, namespaceName, Options); - QueriesGen = new QueriesGen(DbDriver, namespaceName); - ModelsGen = new ModelsGen(DbDriver, namespaceName); - UtilsGen = new UtilsGen(DbDriver, namespaceName); - } - - private Dictionary> ConstructTablesLookup(Catalog catalog) - { - return catalog.Schemas - .Where(s => !_excludedSchemas.Contains(s.Name)) - .ToDictionary( - s => s.Name == catalog.DefaultSchema ? string.Empty : s.Name, - s => s.Tables.ToDictionary(t => t.Rel.Name, t => t)); - } - - /// - /// Enums in the request exist only in the default schema (in mysql), this remaps enums to their original schema. - /// - /// - /// - private Dictionary> ConstructEnumsLookup(Catalog catalog) - { - var defaultSchemaCatalog = catalog.Schemas.First(s => s.Name == catalog.DefaultSchema); - var schemaEnumTuples = defaultSchemaCatalog.Enums - .Select(e => new - { - EnumItem = e, - Schema = FindEnumSchema(e) - }); - var schemaToEnums = schemaEnumTuples - .GroupBy(x => x.Schema) - .ToDictionary( - group => group.Key, - group => group.ToDictionary( - x => x.EnumItem.Name, - x => x.EnumItem) - ); - return schemaToEnums; - } - - private string FindEnumSchema(Enum e) - { - foreach (var schemaTables in Tables) - { - foreach (var table in schemaTables.Value) - { - var isEnumColumn = table.Value.Columns.Any(c => c.Type.Name == e.Name); - if (isEnumColumn) - return schemaTables.Key; - } - } - throw new InvalidDataException($"No enum {e.Name} schema found."); + CsprojGen = new(outputDirectory, projectName, namespaceName, Options); + QueriesGen = new(DbDriver, namespaceName); + ModelsGen = new(DbDriver, namespaceName); + UtilsGen = new(DbDriver, namespaceName); } - private DbDriver InstantiateDriver(string defaultSchema) + private DbDriver 
InstantiateDriver() { return Options.DriverName switch { - DriverName.MySqlConnector => new MySqlConnectorDriver(Options, defaultSchema, Tables, Enums, Queries), - DriverName.Npgsql => new NpgsqlDriver(Options, defaultSchema, Tables, Enums, Queries), - DriverName.Sqlite => new SqliteDriver(Options, defaultSchema, Tables, Enums, Queries), + DriverName.MySqlConnector => new MySqlConnectorDriver(Options, Catalog, Queries), + DriverName.Npgsql => new NpgsqlDriver(Options, Catalog, Queries), + DriverName.Sqlite => new SqliteDriver(Options, Catalog, Queries), _ => throw new ArgumentException($"unknown driver: {Options.DriverName}") }; } @@ -182,7 +119,7 @@ public Task Generate(GenerateRequest generateRequest) var files = GetFileQueries() .Select(fq => QueriesGen.GenerateFile(fq.Value, fq.Key)) .AddRangeExcludeNulls([ - ModelsGen.GenerateFile(Tables, Enums), + ModelsGen.GenerateFile(DbDriver.Tables, DbDriver.Enums), UtilsGen.GenerateFile() ]) .AddRangeIf([CsprojGen.GenerateFile()], Options.GenerateCsproj); @@ -218,12 +155,12 @@ private static Plugin.File RequestToJsonFile(GenerateRequest request) { var formatter = new JsonFormatter(JsonFormatter.Settings.Default.WithIndentation()); request.PluginOptions = GetOptionsWithoutDebugRequest(request); - return new Plugin.File { Name = "request.json", Contents = ByteString.CopyFromUtf8(formatter.Format(request)) }; + return new() { Name = "request.json", Contents = ByteString.CopyFromUtf8(formatter.Format(request)) }; } private static Plugin.File RequestToProtobufFile(GenerateRequest request) { request.PluginOptions = GetOptionsWithoutDebugRequest(request); - return new Plugin.File { Name = "request.message", Contents = request.ToByteString() }; + return new() { Name = "request.message", Contents = request.ToByteString() }; } } \ No newline at end of file diff --git a/CodeGenerator/Generators/EnumsGen.cs b/CodeGenerator/Generators/EnumsGen.cs index 777e4b75..3e06aa32 100644 --- a/CodeGenerator/Generators/EnumsGen.cs +++ 
b/CodeGenerator/Generators/EnumsGen.cs @@ -31,12 +31,25 @@ public static class {{name}}Extensions .Select(v => $"[\"{v}\"] = {name}.{v.ToPascalCase()}") .JoinByComma()}} }; + + private static readonly Dictionary<{{name}}, string> EnumToString = new Dictionary<{{name}}, string>() + { + [{{name}}.Invalid] = string.Empty, + {{possibleValues + .Select(v => $"[{name}.{v.ToPascalCase()}] = \"{v}\"") + .JoinByComma()}} + }; public static {{name}} To{{name}}(this string me) { return StringToEnum[me]; } + public static string Stringify(this {{name}} me) + { + return EnumToString[me]; + } + public static HashSet<{{name}}> To{{name}}Set(this string me) { return new HashSet<{{name}}>(me.Split(',').ToList().Select(v => StringToEnum[v])); diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 0b7886f2..2fccdb5c 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -1,6 +1,5 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; -using SqlcGenCsharp.Drivers.Generators; using System; using System.Collections.Generic; using System.Linq; @@ -56,10 +55,9 @@ public abstract class DbDriver "NpgsqlCidr", ]; - public abstract Dictionary ColumnMappings { get; } + protected abstract Dictionary ColumnMappings { get; } - protected const string JsonElementTypeHandler = - """ + protected const string JsonElementTypeHandler = """ private class JsonElementTypeHandler : SqlMapper.TypeHandler { public override JsonElement Parse(object value) @@ -84,34 +82,30 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz } """; - public readonly string TransactionConnectionNullExcetionThrow = - $""" + public readonly string TransactionConnectionNullExcetionThrow = $""" if (this.{Variable.Transaction.AsPropertyName()}?.Connection == null || this.{Variable.Transaction.AsPropertyName()}?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); """; protected DbDriver( 
Options options, - string defaultSchema, - Dictionary> tables, - Dictionary> enums, + Catalog catalog, IList queries) { Options = options; - DefaultSchema = defaultSchema; - Tables = tables; - Enums = enums; + DefaultSchema = catalog.DefaultSchema; + Tables = ConstructTablesLookup(catalog); Queries = queries; + Enums = ConstructEnumsLookup(catalog); foreach (var schemaEnums in Enums) - { foreach (var e in schemaEnums.Value) { NullableTypes.Add(e.Key.ToModelName(schemaEnums.Key, DefaultSchema)); } - } - if (!Options.DotnetFramework.IsDotnetCore()) return; + if (!Options.DotnetFramework.IsDotnetCore()) + return; foreach (var t in NullableTypesInDotnetCore) { @@ -119,6 +113,23 @@ protected DbDriver( } } + private readonly HashSet _excludedSchemas = + [ + "pg_catalog", + "information_schema" + ]; + + private Dictionary> ConstructTablesLookup(Catalog catalog) + { + return catalog.Schemas + .Where(s => !_excludedSchemas.Contains(s.Name)) + .ToDictionary( + s => s.Name == catalog.DefaultSchema ? string.Empty : s.Name, + s => s.Tables.ToDictionary(t => t.Rel.Name, t => t)); + } + + protected abstract Dictionary> ConstructEnumsLookup(Catalog catalog); + public virtual ISet GetUsingDirectivesForQueries() { return new HashSet @@ -206,8 +217,10 @@ public static void ConfigureSqlMapper() """)!]; } - public string GetColumnSchema(Column column) + protected string GetColumnSchema(Column column) { + if (column.Table == null) + return string.Empty; return column.Table.Schema == DefaultSchema ? string.Empty : column.Table.Schema; } @@ -274,7 +287,7 @@ protected bool CopyFromQueryExists() return Queries.Any(q => q.Cmd is ":copyfrom"); } - public OverrideOption? FindOverrideForQueryColumn(Query? query, Column column) + private OverrideOption? FindOverrideForQueryColumn(Query? 
query, Column column) { if (query is null) return null; @@ -321,7 +334,7 @@ public bool IsTypeNullable(string csharpType) return Options.DotnetFramework.IsDotnetCore(); // non-primitives in .Net Core are inherently nullable } - public string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) + protected string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) { if (column.EmbedTable != null) return column.EmbedTable.Name.ToModelName(column.EmbedTable.Schema, DefaultSchema); @@ -355,6 +368,17 @@ private static bool DoesColumnMappingApply(ColumnMapping columnMapping, Column c return typeInfo.Length.Value == column.Length; } + public virtual Func? GetWriterFn(Column column, Query query) + { + var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); + var writerFn = ColumnMappings.GetValueOrDefault(csharpType)?.WriterFn; + if (writerFn is not null) + return writerFn; + + static string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; + return Options.UseDapper ? null : DefaultWriterFn; + } + /* Column reader methods */ private string GetColumnReader(CsharpTypeOption csharpTypeOption, int ordinal) { @@ -392,17 +416,16 @@ public string GetColumnReader(Column column, int ordinal, Query? query) } /* Enum methods*/ - public Plugin.Enum? GetEnumType(Column column) + protected Plugin.Enum? 
GetEnumType(Column column) { - if (column.Table is null) - return null; var schemaName = GetColumnSchema(column); if (!Enums.TryGetValue(schemaName, value: out var enumsInSchema)) return null; - return enumsInSchema.GetValueOrDefault(column.Type.Name); + var enumNameWithoutSchema = column.Type.Name.Replace($"{schemaName}.", ""); + return enumsInSchema.GetValueOrDefault(enumNameWithoutSchema); } - public virtual string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) + protected virtual string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) { return column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); } diff --git a/Drivers/Generators/CommonGen.cs b/Drivers/Generators/CommonGen.cs index 16283e04..e295e0ed 100644 --- a/Drivers/Generators/CommonGen.cs +++ b/Drivers/Generators/CommonGen.cs @@ -14,28 +14,6 @@ public static string GetMethodParameterList(string argInterface, IEnumerable? GetWriterFn(Column column, Query query) - { - var csharpType = dbDriver.GetCsharpTypeWithoutNullableSuffix(column, query); - var writerFn = dbDriver.ColumnMappings.GetValueOrDefault(csharpType)?.WriterFn; - if (writerFn is not null) - return writerFn; - - if (dbDriver.GetEnumType(column) is { } enumType) - if (dbDriver.EnumToCsharpTypeName(column, enumType).StartsWith("HashSet")) - return (el, notNull, isDapper) => - { - var stringJoinStmt = $"string.Join(\",\", {el})"; - var nullValue = isDapper ? "null" : "(object)DBNull.Value"; - return notNull - ? stringJoinStmt - : $"{el} != null ? {stringJoinStmt} : {nullValue}"; - }; - - string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; - return dbDriver.Options.UseDapper ? 
null : DefaultWriterFn; - } - // TODO: extract AddWithValue statement generation to a method + possible override for Npgsql for type override public string AddParametersToCommand(Query query) { @@ -50,7 +28,7 @@ public string AddParametersToCommand(Query query) """; var notNull = dbDriver.IsColumnNotNull(p.Column, query); - var writerFn = GetWriterFn(p.Column, query); + var writerFn = dbDriver.GetWriterFn(p.Column, query); var paramToWrite = writerFn is null ? param : writerFn(param, notNull, dbDriver.Options.UseDapper); var addParamToCommand = $"""{commandVar}.Parameters.AddWithValue("@{p.Column.Name}", {paramToWrite});"""; return addParamToCommand; @@ -79,7 +57,7 @@ public string ConstructDapperParamsDict(Query query) param += "?.ToEnumString()"; var notNull = dbDriver.IsColumnNotNull(p.Column, query); - var writerFn = GetWriterFn(p.Column, query); + var writerFn = dbDriver.GetWriterFn(p.Column, query); var paramToWrite = writerFn is null ? $"{argsVar}.{param}" : writerFn($"{argsVar}.{param}", notNull, dbDriver.Options.UseDapper); var addParamToDict = $"{queryParamsVar}.Add(\"{p.Column.Name}\", {paramToWrite});"; return addParamToDict; diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index c4a0d0cb..c0348487 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -1,22 +1,23 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; using SqlcGenCsharp.Drivers.Generators; +using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Text.RegularExpressions; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; namespace SqlcGenCsharp.Drivers; -public partial class MySqlConnectorDriver( +public sealed partial class MySqlConnectorDriver( Options options, - string defaultSchema, - Dictionary> tables, - Dictionary> enums, + Catalog catalog, IList queries) : - DbDriver(options, defaultSchema, tables, enums, queries), IOne, IMany, IExec, IExecRows, IExecLastId, 
ICopyFrom + DbDriver(options, catalog, queries), + IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { - public override Dictionary ColumnMappings { get; } = + protected override Dictionary ColumnMappings { get; } = new() { /* Numeric data types */ @@ -155,7 +156,6 @@ public partial class MySqlConnectorDriver( public override string TransactionClassName => "MySqlTransaction"; - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) { @@ -188,7 +188,6 @@ public MemberDeclarationSyntax CopyFromDeclare(string queryTextConstant, string return new CopyFromDeclareGen(this).Generate(queryTextConstant, argInterface, query); } - public override ISet GetUsingDirectivesForQueries() { return base @@ -401,7 +400,7 @@ public class {{x}}CsvConverter : DefaultTypeConverter public override ConnectionGenCommands EstablishConnection(Query query) { - return new ConnectionGenCommands( + return new( $"var {Variable.Connection.AsVarName()} = new MySqlConnection({Variable.ConnectionString.AsPropertyName()})", $"await {Variable.Connection.AsVarName()}.OpenAsync()" ); @@ -441,9 +440,9 @@ public override string[] GetLastIdStatement(Query query) /* :copyfrom methods */ public const string NullToStringCsvConverter = "NullToStringCsvConverter"; - public const string BoolToBitCsvConverter = "BoolToBitCsvConverter"; - public const string ByteCsvConverter = "ByteCsvConverter"; - public const string ByteArrayCsvConverter = "ByteArrayCsvConverter"; + private const string BoolToBitCsvConverter = "BoolToBitCsvConverter"; + private const string ByteCsvConverter = "ByteCsvConverter"; + private const string ByteArrayCsvConverter = "ByteArrayCsvConverter"; public string GetCopyFromImpl(Query query, string queryTextConstant) { @@ -570,13 +569,13 @@ private ISet GetSetConverters(Query query) } /* Enum methods */ - public override string EnumToCsharpTypeName(Column column, Enum enumType) + protected override string 
EnumToCsharpTypeName(Column column, Plugin.Enum enumType) { var enumName = column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); return IsSetDataType(column, enumType) ? $"HashSet<{enumName}>" : enumName; } - private static bool IsSetDataType(Column column, Enum enumType) + private static bool IsSetDataType(Column column, Plugin.Enum enumType) { return column.Length > enumType.Vals.Select(v => v.Length).Sum(); } @@ -586,4 +585,57 @@ private bool IsSetDataType(Column column) var enumType = GetEnumType(column); return enumType is not null && IsSetDataType(column, enumType); } + + protected override Dictionary> ConstructEnumsLookup(Catalog catalog) + { + var defaultSchemaCatalog = catalog.Schemas.First(s => s.Name == catalog.DefaultSchema); + return defaultSchemaCatalog.Enums + .Select(e => new + { + EnumItem = e, + Schema = FindEnumSchema(e) + }) + .GroupBy(x => x.Schema) + .ToDictionary( + group => group.Key, + group => group.ToDictionary( + x => x.EnumItem.Name, + x => x.EnumItem) + ); + } + + private string FindEnumSchema(Plugin.Enum e) + { + foreach (var schemaTables in Tables) + { + foreach (var table in schemaTables.Value) + { + var isEnumColumn = table.Value.Columns.Any(c => c.Type.Name == e.Name); + if (isEnumColumn) + return schemaTables.Key; + } + } + throw new InvalidDataException($"No enum {e.Name} schema found."); + } + + public override Func? GetWriterFn(Column column, Query query) + { + var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); + var writerFn = ColumnMappings.GetValueOrDefault(csharpType)?.WriterFn; + if (writerFn is not null) + return writerFn; + + if (GetEnumType(column) is { } enumType && IsSetDataType(column, enumType)) + return (el, notNull, isDapper) => + { + var stringJoinStmt = $"string.Join(\",\", {el})"; + var nullValue = isDapper ? "null" : "(object)DBNull.Value"; + return notNull + ? stringJoinStmt + : $"{el} != null ? 
{stringJoinStmt} : {nullValue}"; + }; + + static string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; + return Options.UseDapper ? null : DefaultWriterFn; + } } \ No newline at end of file diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index 9a9d7130..f3592421 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -1,6 +1,7 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; using SqlcGenCsharp.Drivers.Generators; +using System; using System.Collections.Generic; using System.Linq; using System.Text.RegularExpressions; @@ -8,15 +9,13 @@ namespace SqlcGenCsharp.Drivers; -public class NpgsqlDriver : DbDriver, IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom +public sealed class NpgsqlDriver : DbDriver, IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { public NpgsqlDriver( Options options, - string defaultSchema, - Dictionary> tables, - Dictionary> enums, + Catalog catalog, IList queries) : - base(options, defaultSchema, tables, enums, queries) + base(options, catalog, queries) { foreach (var columnMapping in ColumnMappings.Values) { @@ -26,12 +25,9 @@ public NpgsqlDriver( columnMapping.DbTypes.Add(dbTypeToAdd, dbType.Value); } } - CommonGen = new CommonGen(this); } - private CommonGen CommonGen { get; } - - public sealed override Dictionary ColumnMappings { get; } = + protected sealed override Dictionary ColumnMappings { get; } = new() { /* Numeric data types */ @@ -325,7 +321,7 @@ public NpgsqlDriver( public override string TransactionClassName => "NpgsqlTransaction"; - protected const string XmlDocumentTypeHandler = + private const string XmlDocumentTypeHandler = """ private class XmlDocumentTypeHandler : SqlMapper.TypeHandler { @@ -347,7 +343,6 @@ public override void SetValue(IDbDataParameter parameter, XmlDocument value) } """; - public MemberDeclarationSyntax OneDeclare(string queryTextConstant, string argInterface, string returnInterface, Query query) 
{ @@ -393,7 +388,8 @@ public override ISet GetUsingDirectivesForModels() { return base.GetUsingDirectivesForModels().AddRangeExcludeNulls( [ - "System" + "System", + "System.Collections.Generic" ]); } @@ -518,7 +514,7 @@ string AddRowsToCopyCommand() { var typeOverride = GetColumnDbTypeOverride(p.Column); var param = $"{rowVar}.{p.Column.Name.ToPascalCase()}"; - var writerFn = CommonGen.GetWriterFn(p.Column, query); + var writerFn = GetWriterFn(p.Column, query); var paramToWrite = writerFn is null ? param : writerFn(param, p.Column.NotNull, false); var partialStmt = $"await {writerVar}.WriteAsync({paramToWrite}"; return typeOverride is null @@ -535,4 +531,42 @@ string AddRowsToCopyCommand() """; } } + + protected override Dictionary> ConstructEnumsLookup(Catalog catalog) + { + return catalog + .Schemas + .SelectMany(s => s.Enums.Select(e => new { EnumItem = e, Schema = s.Name })) + .GroupBy(x => x.Schema == catalog.DefaultSchema ? string.Empty : x.Schema) + .ToDictionary( + group => group.Key, + group => group.ToDictionary( + x => x.EnumItem.Name, + x => x.EnumItem + ) + ); + } + + public override Func? GetWriterFn(Column column, Query query) + { + var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); + var writerFn = ColumnMappings.GetValueOrDefault(csharpType)?.WriterFn; + if (writerFn is not null) + return writerFn; + + if (GetEnumType(column) is { } enumType) + { + return (el, notNull, isDapper) => + { + var enumToStringStmt = $"{el}.Value.Stringify()"; + var nullValue = isDapper ? "null" : "(object)DBNull.Value"; + return notNull + ? enumToStringStmt + : $"{el} != null ? {enumToStringStmt} : {nullValue}"; + }; + } + + static string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; + return Options.UseDapper ? 
null : DefaultWriterFn; + } } \ No newline at end of file diff --git a/Drivers/QueryAnnotations.cs b/Drivers/QueryAnnotations.cs index 4bd347ee..65531a47 100644 --- a/Drivers/QueryAnnotations.cs +++ b/Drivers/QueryAnnotations.cs @@ -1,5 +1,6 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; +using System.Collections.Generic; namespace SqlcGenCsharp.Drivers; @@ -15,7 +16,6 @@ public interface IExec { MemberDeclarationSyntax ExecDeclare(string queryTextConstant, string argInterface, Query query); } - public interface IExecRows { MemberDeclarationSyntax ExecRowsDeclare(string queryTextConstant, string argInterface, Query query); diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index 833326a3..f2954994 100644 --- a/Drivers/SqliteDriver.cs +++ b/Drivers/SqliteDriver.cs @@ -8,15 +8,13 @@ namespace SqlcGenCsharp.Drivers; -public partial class SqliteDriver( +public sealed partial class SqliteDriver( Options options, - string defaultSchema, - Dictionary> tables, - Dictionary> enums, + Catalog catalog, IList queries) : - DbDriver(options, defaultSchema, tables, enums, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom + DbDriver(options, catalog, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { - public override Dictionary ColumnMappings { get; } = + protected override Dictionary ColumnMappings { get; } = new() { ["byte[]"] = new( @@ -152,7 +150,7 @@ public override MemberDeclarationSyntax[] GetMemberDeclarationsForUtils() public override ConnectionGenCommands EstablishConnection(Query query) { - return new ConnectionGenCommands( + return new( $"var {Variable.Connection.AsVarName()} = new SqliteConnection({Variable.ConnectionString.AsPropertyName()})", $"await {Variable.Connection.AsVarName()}.OpenAsync()" ); @@ -231,4 +229,9 @@ string AddParametersToCommand() """; } } + + protected override Dictionary> ConstructEnumsLookup(Catalog catalog) + { + return []; + } } \ No newline at end of file diff --git 
a/Extensions/StringExtensions.cs b/Extensions/StringExtensions.cs index 4594c8f9..0aa0cd58 100644 --- a/Extensions/StringExtensions.cs +++ b/Extensions/StringExtensions.cs @@ -38,6 +38,7 @@ public static string ToCamelCase(this string value) public static string ToModelName(this string value, string schema, string defaultSchema) { var schemaName = schema == defaultSchema ? string.Empty : schema; + value = value.Replace($"{schemaName}.", ""); return $"{schemaName}_{value.TrimEnd('s')}".ToPascalCase(); // TODO implement better way to turn words to singular } diff --git a/RequestRunner/App.cs b/RequestRunner/App.cs index 169ab65b..4caf0b3d 100644 --- a/RequestRunner/App.cs +++ b/RequestRunner/App.cs @@ -10,7 +10,10 @@ public static class App public static async Task Main(string[] requestFiles) { foreach (var requestFile in requestFiles) + { await ProcessRequestFile(requestFile); + break; + } } private static async Task ProcessRequestFile(string requestFile) diff --git a/docs/04_Postgres.md b/docs/04_Postgres.md index 2f980017..6b66fb0c 100644 --- a/docs/04_Postgres.md +++ b/docs/04_Postgres.md @@ -66,7 +66,7 @@ we consider support for the different data types separately for batch inserts an | jsonb | тЬЕ | тЭМ | | jsonpath | тЬЕ | тЭМ | | xml | тЬЕ | тЭМ | -| enum | тЭМ | тЭМ | +| enum | тЬЕ | тЭМ | *** `time with time zone` is not useful and not recommended to use by Postgres themselves - see [here](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME) - diff --git a/end2end/EndToEndScaffold/Config.cs b/end2end/EndToEndScaffold/Config.cs index ecd8af38..62dbef09 100644 --- a/end2end/EndToEndScaffold/Config.cs +++ b/end2end/EndToEndScaffold/Config.cs @@ -59,6 +59,7 @@ public enum KnownTestType PostgresInvalidJson, PostgresXmlDataTypes, PostgresInvalidXml, + PostgresEnumDataType, ArrayAsParam, MultipleArraysAsParams, @@ -207,6 +208,7 @@ internal static class Config KnownTestType.PostgresNetworkDataTypes, KnownTestType.PostgresXmlDataTypes, 
KnownTestType.PostgresInvalidXml, + KnownTestType.PostgresEnumDataType, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, @@ -252,6 +254,7 @@ internal static class Config KnownTestType.PostgresNetworkDataTypes, KnownTestType.PostgresXmlDataTypes, KnownTestType.PostgresInvalidXml, + KnownTestType.PostgresEnumDataType, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index 43c07b46..8039df79 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -269,53 +269,6 @@ void AssertSingularEquals(QuerySql.GetMysqlBinaryTypesRow x, QuerySql.GetMysqlBi } """ }, - [KnownTestType.MySqlEnumDataType] = new TestImpl - { - Impl = $$""" - private static IEnumerable MySqlEnumTypesTestCases - { - get - { - yield return new TestCaseData( - MysqlStringTypesCEnum.Medium, - new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee } - ).SetName("Valid Enum values"); - - yield return new TestCaseData( - null, - null - ).SetName("Enum with null values"); - } - } - - [Test] - [TestCaseSource(nameof(MySqlEnumTypesTestCases))] - public async Task TestMySqlStringTypes( - MysqlStringTypesCEnum? 
cEnum, - HashSet cSet) - { - await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs - { - CEnum = cEnum, - CSet = cSet - }); - - var expected = new QuerySql.GetMysqlStringTypesRow - { - CEnum = cEnum, - CSet = cSet - }; - var actual = await QuerySql.GetMysqlStringTypes(); - AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - - void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) - { - Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); - Assert.That(x.CSet, Is.EqualTo(y.CSet)); - } - } - """ - }, [KnownTestType.MySqlStringCopyFrom] = new TestImpl { Impl = $$""" @@ -660,6 +613,53 @@ public async Task TestMySqlTransactionRollback() } """ }, + [KnownTestType.MySqlEnumDataType] = new TestImpl + { + Impl = $$""" + private static IEnumerable MySqlEnumTypesTestCases + { + get + { + yield return new TestCaseData( + MysqlStringTypesCEnum.Medium, + new HashSet { MysqlStringTypesCSet.Tea, MysqlStringTypesCSet.Coffee } + ).SetName("Valid Enum values"); + + yield return new TestCaseData( + null, + null + ).SetName("Enum with null values"); + } + } + + [Test] + [TestCaseSource(nameof(MySqlEnumTypesTestCases))] + public async Task TestMySqlStringTypes( + MysqlStringTypesCEnum? 
cEnum, + HashSet cSet) + { + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs + { + CEnum = cEnum, + CSet = cSet + }); + + var expected = new QuerySql.GetMysqlStringTypesRow + { + CEnum = cEnum, + CSet = cSet + }; + var actual = await QuerySql.GetMysqlStringTypes(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlStringTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + Assert.That(x.CSet, Is.EqualTo(y.CSet)); + } + } + """ + }, [KnownTestType.MySqlEnumCopyFrom] = new TestImpl { Impl = $$""" diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 1970cad8..986c1e8b 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -1080,6 +1080,33 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgre } } """ + }, + [KnownTestType.PostgresEnumDataType] = new TestImpl + { + Impl = $$""" + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? 
cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + { + CEnum = cEnum + }); + + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + """ } }; } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 50aa80d1..e64cb342 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -924,6 +924,24 @@ public void TestPostgresInvalidXml() Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? 
cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index ff46a314..2b624b9b 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -924,6 +924,24 @@ public void TestPostgresInvalidXml() Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? 
cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 4babb377..f554ab29 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -924,6 +924,24 @@ public void TestPostgresInvalidXml() Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? 
cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] public async Task TestArray() { diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 0ee9a507..3232c115 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -924,6 +924,24 @@ public void TestPostgresInvalidXml() Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? 
cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] public async Task TestArray() { diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index d71efd9e..fb0ee0ab 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -93,11 +93,23 @@ public static class MysqlStringTypesCEnumExtensions ["medium"] = MysqlStringTypesCEnum.Medium, ["big"] = MysqlStringTypesCEnum.Big }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" + }; public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCEnum me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -121,11 +133,23 @@ public static class MysqlStringTypesCSetExtensions ["coffee"] = MysqlStringTypesCSet.Coffee, ["milk"] = MysqlStringTypesCSet.Milk }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" + }; public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } + public static string 
Stringify(this MysqlStringTypesCSet me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCSetSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -149,11 +173,23 @@ public static class ExtendedBiosBioTypeExtensions ["Biography"] = ExtendedBiosBioType.Biography, ["Memoir"] = ExtendedBiosBioType.Memoir }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosBioType.Invalid] = string.Empty, + [ExtendedBiosBioType.Autobiography] = "Autobiography", + [ExtendedBiosBioType.Biography] = "Biography", + [ExtendedBiosBioType.Memoir] = "Memoir" + }; public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosBioType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosBioTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -177,11 +213,23 @@ public static class ExtendedBiosAuthorTypeExtensions ["Editor"] = ExtendedBiosAuthorType.Editor, ["Translator"] = ExtendedBiosAuthorType.Translator }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosAuthorType.Invalid] = string.Empty, + [ExtendedBiosAuthorType.Author] = "Author", + [ExtendedBiosAuthorType.Editor] = "Editor", + [ExtendedBiosAuthorType.Translator] = "Translator" + }; public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosAuthorType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); diff --git a/examples/MySqlConnectorDapperLegacyExample/Models.cs b/examples/MySqlConnectorDapperLegacyExample/Models.cs index dec1fb27..fffbc9ad 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Models.cs 
+++ b/examples/MySqlConnectorDapperLegacyExample/Models.cs @@ -94,11 +94,23 @@ public static class MysqlStringTypesCEnumExtensions ["medium"] = MysqlStringTypesCEnum.Medium, ["big"] = MysqlStringTypesCEnum.Big }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" + }; public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCEnum me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -122,11 +134,23 @@ public static class MysqlStringTypesCSetExtensions ["coffee"] = MysqlStringTypesCSet.Coffee, ["milk"] = MysqlStringTypesCSet.Milk }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" + }; public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCSet me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCSetSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -150,11 +174,23 @@ public static class ExtendedBiosBioTypeExtensions ["Biography"] = ExtendedBiosBioType.Biography, ["Memoir"] = ExtendedBiosBioType.Memoir }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosBioType.Invalid] = string.Empty, + [ExtendedBiosBioType.Autobiography] = "Autobiography", + [ExtendedBiosBioType.Biography] = "Biography", + [ExtendedBiosBioType.Memoir] = "Memoir" + }; 
public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosBioType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosBioTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -178,11 +214,23 @@ public static class ExtendedBiosAuthorTypeExtensions ["Editor"] = ExtendedBiosAuthorType.Editor, ["Translator"] = ExtendedBiosAuthorType.Translator }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosAuthorType.Invalid] = string.Empty, + [ExtendedBiosAuthorType.Author] = "Author", + [ExtendedBiosAuthorType.Editor] = "Editor", + [ExtendedBiosAuthorType.Translator] = "Translator" + }; public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosAuthorType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index d904f923..6c575cd9 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -29,11 +29,23 @@ public static class MysqlStringTypesCEnumExtensions ["medium"] = MysqlStringTypesCEnum.Medium, ["big"] = MysqlStringTypesCEnum.Big }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" + }; public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCEnum me) + { + return EnumToString[me]; + } + public static HashSet 
ToMysqlStringTypesCEnumSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -57,11 +69,23 @@ public static class MysqlStringTypesCSetExtensions ["coffee"] = MysqlStringTypesCSet.Coffee, ["milk"] = MysqlStringTypesCSet.Milk }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" + }; public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCSet me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCSetSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -85,11 +109,23 @@ public static class ExtendedBiosBioTypeExtensions ["Biography"] = ExtendedBiosBioType.Biography, ["Memoir"] = ExtendedBiosBioType.Memoir }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosBioType.Invalid] = string.Empty, + [ExtendedBiosBioType.Autobiography] = "Autobiography", + [ExtendedBiosBioType.Biography] = "Biography", + [ExtendedBiosBioType.Memoir] = "Memoir" + }; public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosBioType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosBioTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -113,11 +149,23 @@ public static class ExtendedBiosAuthorTypeExtensions ["Editor"] = ExtendedBiosAuthorType.Editor, ["Translator"] = ExtendedBiosAuthorType.Translator }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosAuthorType.Invalid] = string.Empty, + [ExtendedBiosAuthorType.Author] = "Author", + 
[ExtendedBiosAuthorType.Editor] = "Editor", + [ExtendedBiosAuthorType.Translator] = "Translator" + }; public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosAuthorType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index 1adb9108..51861f59 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -94,11 +94,23 @@ public static class MysqlStringTypesCEnumExtensions ["medium"] = MysqlStringTypesCEnum.Medium, ["big"] = MysqlStringTypesCEnum.Big }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" + }; public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } + public static string Stringify(this MysqlStringTypesCEnum me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -122,11 +134,23 @@ public static class MysqlStringTypesCSetExtensions ["coffee"] = MysqlStringTypesCSet.Coffee, ["milk"] = MysqlStringTypesCSet.Milk }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" + }; public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } + public static string Stringify(this 
MysqlStringTypesCSet me) + { + return EnumToString[me]; + } + public static HashSet ToMysqlStringTypesCSetSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -150,11 +174,23 @@ public static class ExtendedBiosBioTypeExtensions ["Biography"] = ExtendedBiosBioType.Biography, ["Memoir"] = ExtendedBiosBioType.Memoir }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosBioType.Invalid] = string.Empty, + [ExtendedBiosBioType.Autobiography] = "Autobiography", + [ExtendedBiosBioType.Biography] = "Biography", + [ExtendedBiosBioType.Memoir] = "Memoir" + }; public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosBioType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosBioTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); @@ -178,11 +214,23 @@ public static class ExtendedBiosAuthorTypeExtensions ["Editor"] = ExtendedBiosAuthorType.Editor, ["Translator"] = ExtendedBiosAuthorType.Translator }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBiosAuthorType.Invalid] = string.Empty, + [ExtendedBiosAuthorType.Author] = "Author", + [ExtendedBiosAuthorType.Editor] = "Editor", + [ExtendedBiosAuthorType.Translator] = "Translator" + }; public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) { return StringToEnum[me]; } + public static string Stringify(this ExtendedBiosAuthorType me) + { + return EnumToString[me]; + } + public static HashSet ToExtendedBiosAuthorTypeSet(this string me) { return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 1b7e04c9..66edb512 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -1,6 +1,7 
@@ // auto-generated by sqlc - do not edit using NpgsqlTypes; using System; +using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.NetworkInformation; @@ -35,6 +36,7 @@ public class PostgresType public PhysicalAddress? CMacaddr { get; init; } public string? CMacaddr8 { get; init; } public Guid? CUuid { get; init; } + public CEnum? CEnum { get; init; } }; public class PostgresUnstructuredType { @@ -77,4 +79,89 @@ public class Book public required string Name { get; init; } public required long AuthorId { get; init; } public string? Description { get; init; } -}; \ No newline at end of file +}; +public class ExtendedBio +{ + public required string AuthorName { get; init; } + public required string Name { get; init; } + public ExtendedBioType? BioType { get; init; } +}; +public enum CEnum +{ + Invalid = 0, // reserved for invalid enum value + Small = 1, + Medium = 2, + Big = 3 +} + +public static class CEnumExtensions +{ + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = CEnum.Invalid, + ["small"] = CEnum.Small, + ["medium"] = CEnum.Medium, + ["big"] = CEnum.Big + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [CEnum.Invalid] = string.Empty, + [CEnum.Small] = "small", + [CEnum.Medium] = "medium", + [CEnum.Big] = "big" + }; + public static CEnum ToCEnum(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this CEnum me) + { + return EnumToString[me]; + } + + public static HashSet ToCEnumSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } +} + +public enum ExtendedBioType +{ + Invalid = 0, // reserved for invalid enum value + Autobiography = 1, + Biography = 2, + Memoir = 3 +} + +public static class ExtendedBioTypeExtensions +{ + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = ExtendedBioType.Invalid, + ["Autobiography"] = 
ExtendedBioType.Autobiography, + ["Biography"] = ExtendedBioType.Biography, + ["Memoir"] = ExtendedBioType.Memoir + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBioType.Invalid] = string.Empty, + [ExtendedBioType.Autobiography] = "Autobiography", + [ExtendedBioType.Biography] = "Biography", + [ExtendedBioType.Memoir] = "Memoir" + }; + public static ExtendedBioType ToExtendedBioType(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this ExtendedBioType me) + { + return EnumToString[me]; + } + + public static HashSet ToExtendedBioTypeSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } +} \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 5631b310..015e67b9 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -43,7 +43,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? 
ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; init; } @@ -67,6 +67,7 @@ public class InsertPostgresTypesArgs public string? CBpchar { get; init; } public string? CText { get; init; } public Guid? CUuid { get; init; } + public CEnum? CEnum { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? 
CMacaddr { get; init; } @@ -96,6 +97,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_bpchar", args.CBpchar); queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -181,7 +183,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; init; } @@ -205,6 +207,7 @@ public class GetPostgresTypesRow public string? CBpchar { get; init; } public string? CText { get; init; } public Guid? CUuid { get; init; } + public CEnum? CEnum { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? 
CMacaddr { get; init; } @@ -1063,4 +1066,73 @@ public async Task> GetAuthorsByBookName(GetAuthors } } } + + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs + { + public required string AuthorName { get; init; } + public required string Name { get; init; } + public ExtendedBioType? BioType { get; init; } + }; + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow + { + public required string AuthorName { get; init; } + public required string Name { get; init; } + public ExtendedBioType? BioType { get; init; } + }; + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; init; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateExtendedBiosSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + } } \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index 6540a548..0d2cf59c 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -295,6 +295,16 @@ "type": { "name": "uuid" } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } } ] }, @@ -625,6 +635,16 @@ } ] } + ], + "enums": [ + { + "name": "c_enum", + "vals": [ + "small", + "medium", + "big" + ] + } ] }, { @@ -32468,12 +32488,73 @@ ] } ] + }, + { + "name": "extended", + "tables": [ + { + "rel": { + "schema": "extended", + "name": "bios" + }, + "columns": [ + { + "name": 
"author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + } + } + ] + } + ], + "enums": [ + { + "name": "bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" + ] + } + ] } ] }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32815,6 +32896,16 @@ }, { "number": 22, + "column": { + "name": "c_enum", + "length": -1, + 
"type": { + "name": "c_enum" + } + } + }, + { + "number": 23, "column": { "name": "c_cidr", "length": -1, @@ -32830,7 +32921,7 @@ } }, { - "number": 23, + "number": 24, "column": { "name": "c_inet", "length": -1, @@ -32846,7 +32937,7 @@ } }, { - "number": 24, + "number": 25, "column": { "name": "c_macaddr", "length": -1, @@ -32856,7 +32947,7 @@ } }, { - "number": 25, + "number": 26, "column": { "name": "c_macaddr8", "length": -1, @@ -33231,7 +33322,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33482,6 +33573,17 @@ }, "originalName": "c_uuid" }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + }, { "name": "c_cidr", "length": -1, @@ -35376,6 +35478,137 @@ } ], "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + 
"originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + }, + "originalName": "bio_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index f6eb1d32..7cd672ac 100644 --- 
a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunner╓ч public"кpublicс +./dist/LocalRunner╖ъ public"∙publicТ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -49,7 +49,8 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuidн +c_uuid0         Rpostgres_typesbuuid/ +c_enum0         Rpostgres_typesbc_enumн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -88,7 +89,8 @@ pg_catalog timestamp name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext" pg_temp"ц▓ + description0         Rbooksbtext" +c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10209,8 +10211,15 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_no║ -М +pg_cataloginformation_schemaviewsb  
yes_or_no"Пextended╘ +extendedbiosC + author_name0         Rextendedbiosb +pg_catalogvarchar< +name0         Rextendedbiosb +pg_catalogvarchar= +bio_type0         Rextendedbiosbextendedbio_type", +bio_type Autobiography BiographyMemoir· +й INSERT INTO postgres_types ( c_boolean, @@ -10234,6 +10243,7 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10261,10 +10271,11 @@ VALUES ( $19, $20, $21, - $22, + $22::c_enum, $23, - $24::macaddr, - $25::macaddr8 + $24, + $25::macaddr, + $26::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10291,10 +10302,11 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b macaddr82 Basic types : query.sqlBpostgres_typesЗ @@ -10376,8 +10388,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ -┤SELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу +└SELECT c_boolean, c_bit, c_smallint, @@ -10399,6 +10411,7 @@ c_interval*NJ c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10445,7 +10458,8 @@ pg_catalogvarcharz c_varchar"^ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10779,4 +10793,16 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sql"v1.27.0*╘{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +name0         Rbooksbtextzname: query.sqlь +KINSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)CreateExtendedBio:exec*SO + author_name0         Rextendedbiosbpg_catalog.varcharz author_name*EA +name0         Rextendedbiosbpg_catalog.varcharzname*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlBextendedbiosк +QSELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1GetFirstExtendedBioByType:one"P + author_name0         Rextendedbiosb +pg_catalogvarcharz author_name"B +name0         Rextendedbiosb +pg_catalogvarcharzname"G +bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: 
query.sql"v1.27.0*╘{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlDapperExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index 5ff230ae..dcb2cc25 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -3,6 +3,7 @@ namespace NpgsqlDapperLegacyExampleGen { using NpgsqlTypes; using System; + using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.NetworkInformation; @@ -36,6 +37,7 @@ public class PostgresType public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } }; public class PostgresUnstructuredType { @@ -79,4 +81,89 @@ public class Book public long AuthorId { get; set; } public string Description { get; set; } }; + public class ExtendedBio + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? 
BioType { get; set; } + }; + public enum CEnum + { + Invalid = 0, // reserved for invalid enum value + Small = 1, + Medium = 2, + Big = 3 + } + + public static class CEnumExtensions + { + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = CEnum.Invalid, + ["small"] = CEnum.Small, + ["medium"] = CEnum.Medium, + ["big"] = CEnum.Big + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [CEnum.Invalid] = string.Empty, + [CEnum.Small] = "small", + [CEnum.Medium] = "medium", + [CEnum.Big] = "big" + }; + public static CEnum ToCEnum(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this CEnum me) + { + return EnumToString[me]; + } + + public static HashSet ToCEnumSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } + } + + public enum ExtendedBioType + { + Invalid = 0, // reserved for invalid enum value + Autobiography = 1, + Biography = 2, + Memoir = 3 + } + + public static class ExtendedBioTypeExtensions + { + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = ExtendedBioType.Invalid, + ["Autobiography"] = ExtendedBioType.Autobiography, + ["Biography"] = ExtendedBioType.Biography, + ["Memoir"] = ExtendedBioType.Memoir + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBioType.Invalid] = string.Empty, + [ExtendedBioType.Autobiography] = "Autobiography", + [ExtendedBioType.Biography] = "Biography", + [ExtendedBioType.Memoir] = "Memoir" + }; + public static ExtendedBioType ToExtendedBioType(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this ExtendedBioType me) + { + return EnumToString[me]; + } + + public static HashSet ToExtendedBioTypeSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } + } } \ No newline at end of file diff --git 
a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 359db9b4..e9ba900e 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -44,7 +44,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? 
CBoolean { get; set; } @@ -68,6 +68,7 @@ public class InsertPostgresTypesArgs public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -97,6 +98,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_bpchar", args.CBpchar); queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); queryParams.Add("c_cidr", args.CCidr); queryParams.Add("c_inet", args.CInet); queryParams.Add("c_macaddr", args.CMacaddr); @@ -182,7 +184,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -206,6 +208,7 @@ public class GetPostgresTypesRow public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } public NpgsqlCidr? 
CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -1064,5 +1067,74 @@ public async Task> GetAuthorsByBookName(GetAuthors } } } + + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } + }; + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } + }; + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateExtendedBiosSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + } } } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 5025d042..13f6763d 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -295,6 +295,16 @@ "type": { "name": "uuid" } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } } ] }, @@ -625,6 +635,16 @@ } ] } + ], + "enums": [ + { + "name": "c_enum", + "vals": [ + "small", + "medium", + "big" + ] + } ] }, { @@ -32468,12 +32488,73 @@ ] } ] + }, + { + "name": "extended", + "tables": [ + { + "rel": { + "schema": "extended", + "name": "bios" + }, + 
"columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + } + } + ] + } + ], + "enums": [ + { + "name": "bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" + ] + } + ] } ] }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32815,6 +32896,16 @@ }, { "number": 22, + "column": { + "name": 
"c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } + }, + { + "number": 23, "column": { "name": "c_cidr", "length": -1, @@ -32830,7 +32921,7 @@ } }, { - "number": 23, + "number": 24, "column": { "name": "c_inet", "length": -1, @@ -32846,7 +32937,7 @@ } }, { - "number": 24, + "number": 25, "column": { "name": "c_macaddr", "length": -1, @@ -32856,7 +32947,7 @@ } }, { - "number": 25, + "number": 26, "column": { "name": "c_macaddr8", "length": -1, @@ -33231,7 +33322,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33482,6 +33573,17 @@ }, "originalName": "c_uuid" }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + }, { "name": "c_cidr", "length": -1, @@ -35376,6 +35478,137 @@ } ], "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": 
"pg_catalog.varchar" + }, + "originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + }, + "originalName": "bio_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 17b5a68c..03598b2e 100644 --- 
a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbЁ "examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner╓ч public"кpublicс +./dist/LocalRunner╖ъ public"∙publicТ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -49,7 +49,8 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuidн +c_uuid0         Rpostgres_typesbuuid/ +c_enum0         Rpostgres_typesbc_enumн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -88,7 +89,8 @@ pg_catalog timestamp name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext" pg_temp"ц▓ + description0         Rbooksbtext" +c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10209,8 +10211,15 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_no║ -М 
+pg_cataloginformation_schemaviewsb  yes_or_no"Пextended╘ +extendedbiosC + author_name0         Rextendedbiosb +pg_catalogvarchar< +name0         Rextendedbiosb +pg_catalogvarchar= +bio_type0         Rextendedbiosbextendedbio_type", +bio_type Autobiography BiographyMemoir· +й INSERT INTO postgres_types ( c_boolean, @@ -10234,6 +10243,7 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10261,10 +10271,11 @@ VALUES ( $19, $20, $21, - $22, + $22::c_enum, $23, - $24::macaddr, - $25::macaddr8 + $24, + $25::macaddr, + $26::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10291,10 +10302,11 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b macaddr82 Basic types : query.sqlBpostgres_typesЗ @@ -10376,8 +10388,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ -┤SELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу +└SELECT c_boolean, c_bit, c_smallint, @@ -10399,6 +10411,7 @@ c_interval*NJ c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10445,7 +10458,8 @@ pg_catalogvarcharz c_varchar"^ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10779,4 +10793,16 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sql"v1.27.0*т{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +name0         Rbooksbtextzname: query.sqlь +KINSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)CreateExtendedBio:exec*SO + author_name0         Rextendedbiosbpg_catalog.varcharz author_name*EA +name0         Rextendedbiosbpg_catalog.varcharzname*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlBextendedbiosк +QSELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1GetFirstExtendedBioByType:one"P + author_name0         Rextendedbiosb +pg_catalogvarcharz author_name"B +name0         Rextendedbiosb +pg_catalogvarcharzname"G +bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: 
query.sql"v1.27.0*т{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlDapperLegacyExampleGen","useDapper":true,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index 982d5a97..05a3fb90 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -1,6 +1,7 @@ // auto-generated by sqlc - do not edit using NpgsqlTypes; using System; +using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.NetworkInformation; @@ -8,9 +9,89 @@ using System.Xml; namespace NpgsqlExampleGen; -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid); +public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? 
CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid, CEnum? CEnum); public readonly record struct PostgresUnstructuredType(JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride); public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); public readonly record struct Author(long Id, string Name, string? Bio); -public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); \ No newline at end of file +public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); +public readonly record struct ExtendedBio(string AuthorName, string Name, ExtendedBioType? 
BioType); +public enum CEnum +{ + Invalid = 0, // reserved for invalid enum value + Small = 1, + Medium = 2, + Big = 3 +} + +public static class CEnumExtensions +{ + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = CEnum.Invalid, + ["small"] = CEnum.Small, + ["medium"] = CEnum.Medium, + ["big"] = CEnum.Big + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [CEnum.Invalid] = string.Empty, + [CEnum.Small] = "small", + [CEnum.Medium] = "medium", + [CEnum.Big] = "big" + }; + public static CEnum ToCEnum(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this CEnum me) + { + return EnumToString[me]; + } + + public static HashSet ToCEnumSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } +} + +public enum ExtendedBioType +{ + Invalid = 0, // reserved for invalid enum value + Autobiography = 1, + Biography = 2, + Memoir = 3 +} + +public static class ExtendedBioTypeExtensions +{ + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = ExtendedBioType.Invalid, + ["Autobiography"] = ExtendedBioType.Autobiography, + ["Biography"] = ExtendedBioType.Biography, + ["Memoir"] = ExtendedBioType.Memoir + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBioType.Invalid] = string.Empty, + [ExtendedBioType.Autobiography] = "Autobiography", + [ExtendedBioType.Biography] = "Biography", + [ExtendedBioType.Memoir] = "Memoir" + }; + public static ExtendedBioType ToExtendedBioType(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this ExtendedBioType me) + { + return EnumToString[me]; + } + + public static HashSet ToExtendedBioTypeSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } +} \ No newline at end of file diff --git a/examples/NpgsqlExample/QuerySql.cs 
b/examples/NpgsqlExample/QuerySql.cs index ddd466b8..fe79e682 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -40,8 +40,8 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -71,6 +71,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -109,6 +110,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -161,8 +163,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -198,10 +200,11 @@ public async Task InsertPostgresTypesBatch(List ar CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CEnum = reader.IsDBNull(21) ? null : reader.GetString(21).ToCEnum(), + CCidr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), + CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) }; } } @@ -244,10 +247,11 @@ public async Task InsertPostgresTypesBatch(List ar CBpchar = reader.IsDBNull(18) ? 
null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CEnum = reader.IsDBNull(21) ? null : reader.GetString(21).ToCEnum(), + CCidr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), + CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), + CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) }; } } @@ -1498,4 +1502,117 @@ public async Task> GetAuthorsByBookName(GetAuthors } } } + + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public readonly record struct CreateExtendedBioArgs(string AuthorName, string Name, ExtendedBioType? BioType); + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(CreateExtendedBioSql)) + { + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateExtendedBioSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public readonly record struct GetFirstExtendedBioByTypeRow(string AuthorName, string Name, ExtendedBioType? BioType); + public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBioType? BioType); + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) + { + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? 
null : reader.GetString(2).ToExtendedBioType() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetFirstExtendedBioByTypeSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBioType() + }; + } + } + } + + return null; + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateExtendedBiosSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } \ No newline at end of file diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index 4f335f7b..a557de82 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -295,6 +295,16 @@ "type": { 
"name": "uuid" } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } } ] }, @@ -625,6 +635,16 @@ } ] } + ], + "enums": [ + { + "name": "c_enum", + "vals": [ + "small", + "medium", + "big" + ] + } ] }, { @@ -32468,12 +32488,73 @@ ] } ] + }, + { + "name": "extended", + "tables": [ + { + "rel": { + "schema": "extended", + "name": "bios" + }, + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + } + } + ] + } + ], + "enums": [ + { + "name": "bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" + ] + } + ] } ] }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n 
c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32815,6 +32896,16 @@ }, { "number": 22, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } + }, + { + "number": 23, "column": { "name": "c_cidr", "length": -1, @@ -32830,7 +32921,7 @@ } }, { - "number": 23, + "number": 24, "column": { "name": "c_inet", "length": -1, @@ -32846,7 +32937,7 @@ } }, { - "number": 24, + "number": 25, "column": { "name": "c_macaddr", "length": -1, @@ -32856,7 +32947,7 @@ } }, { - "number": 25, + "number": 26, "column": { "name": "c_macaddr8", "length": -1, @@ -33231,7 +33322,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33482,6 +33573,17 @@ }, "originalName": "c_uuid" }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + }, { "name": "c_cidr", 
"length": -1, @@ -35376,6 +35478,137 @@ } ], "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + }, + "originalName": "bio_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": 
"extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 52fe019a..d70d6924 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunner╓ч public"кpublicс +./dist/LocalRunner╖ъ public"∙publicТ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -49,7 +49,8 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuidн +c_uuid0         Rpostgres_typesbuuid/ +c_enum0         Rpostgres_typesbc_enumн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -88,7 +89,8 @@ pg_catalog timestamp name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0        
 Rbooksbtext" pg_temp"ц▓ + description0         Rbooksbtext" +c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10209,8 +10211,15 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_no║ -М +pg_cataloginformation_schemaviewsb  yes_or_no"Пextended╘ +extendedbiosC + author_name0         Rextendedbiosb +pg_catalogvarchar< +name0         Rextendedbiosb +pg_catalogvarchar= +bio_type0         Rextendedbiosbextendedbio_type", +bio_type Autobiography BiographyMemoir· +й INSERT INTO postgres_types ( c_boolean, @@ -10234,6 +10243,7 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10261,10 +10271,11 @@ VALUES ( $19, $20, $21, - $22, + $22::c_enum, $23, - $24::macaddr, - $25::macaddr8 + $24, + $25::macaddr, + $26::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10291,10 +10302,11 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b macaddr82 Basic types : query.sqlBpostgres_typesЗ @@ -10376,8 +10388,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ -┤SELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу +└SELECT c_boolean, c_bit, c_smallint, @@ -10399,6 +10411,7 @@ c_interval*NJ c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10445,7 +10458,8 @@ pg_catalogvarcharz c_varchar"^ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10779,4 +10793,16 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sql"v1.27.0*╧{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +name0         Rbooksbtextzname: query.sqlь +KINSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)CreateExtendedBio:exec*SO + author_name0         Rextendedbiosbpg_catalog.varcharz author_name*EA +name0         Rextendedbiosbpg_catalog.varcharzname*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlBextendedbiosк +QSELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1GetFirstExtendedBioByType:one"P + author_name0         Rextendedbiosb +pg_catalogvarcharz author_name"B +name0         Rextendedbiosb +pg_catalogvarcharzname"G +bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: 
query.sql"v1.27.0*╧{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"net8.0","namespaceName":"NpgsqlExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 218ef93b..42367e29 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -3,6 +3,7 @@ namespace NpgsqlLegacyExampleGen { using NpgsqlTypes; using System; + using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.NetworkInformation; @@ -36,6 +37,7 @@ public class PostgresType public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } }; public class PostgresUnstructuredType { @@ -79,4 +81,89 @@ public class Book public long AuthorId { get; set; } public string Description { get; set; } }; + public class ExtendedBio + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? 
BioType { get; set; } + }; + public enum CEnum + { + Invalid = 0, // reserved for invalid enum value + Small = 1, + Medium = 2, + Big = 3 + } + + public static class CEnumExtensions + { + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = CEnum.Invalid, + ["small"] = CEnum.Small, + ["medium"] = CEnum.Medium, + ["big"] = CEnum.Big + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [CEnum.Invalid] = string.Empty, + [CEnum.Small] = "small", + [CEnum.Medium] = "medium", + [CEnum.Big] = "big" + }; + public static CEnum ToCEnum(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this CEnum me) + { + return EnumToString[me]; + } + + public static HashSet ToCEnumSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } + } + + public enum ExtendedBioType + { + Invalid = 0, // reserved for invalid enum value + Autobiography = 1, + Biography = 2, + Memoir = 3 + } + + public static class ExtendedBioTypeExtensions + { + private static readonly Dictionary StringToEnum = new Dictionary() + { + [string.Empty] = ExtendedBioType.Invalid, + ["Autobiography"] = ExtendedBioType.Autobiography, + ["Biography"] = ExtendedBioType.Biography, + ["Memoir"] = ExtendedBioType.Memoir + }; + private static readonly Dictionary EnumToString = new Dictionary() + { + [ExtendedBioType.Invalid] = string.Empty, + [ExtendedBioType.Autobiography] = "Autobiography", + [ExtendedBioType.Biography] = "Biography", + [ExtendedBioType.Memoir] = "Memoir" + }; + public static ExtendedBioType ToExtendedBioType(this string me) + { + return StringToEnum[me]; + } + + public static string Stringify(this ExtendedBioType me) + { + return EnumToString[me]; + } + + public static HashSet ToExtendedBioTypeSet(this string me) + { + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + } + } } \ No newline at end of file diff --git 
a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index c8a67469..3a93c541 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -41,7 +41,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? 
CBoolean { get; set; } @@ -65,6 +65,7 @@ public class InsertPostgresTypesArgs public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -99,6 +100,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); @@ -137,6 +139,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? 
(object)DBNull.Value); @@ -214,7 +217,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -238,6 +241,7 @@ public class GetPostgresTypesRow public string CBpchar { get; set; } public string CText { get; set; } public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -278,10 +282,11 @@ public async Task GetPostgresTypes() CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CEnum = reader.IsDBNull(21) ? (CEnum? )null : reader.GetString(21).ToCEnum(), + CCidr = reader.IsDBNull(22) ? (NpgsqlCidr? )null : reader.GetFieldValue(22), + CInet = reader.IsDBNull(23) ? 
null : reader.GetFieldValue(23), + CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), + CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) }; } } @@ -324,10 +329,11 @@ public async Task GetPostgresTypes() CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), CText = reader.IsDBNull(19) ? null : reader.GetString(19), CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CCidr = reader.IsDBNull(21) ? (NpgsqlCidr? )null : reader.GetFieldValue(21), - CInet = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CMacaddr = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr8 = reader.IsDBNull(24) ? null : reader.GetString(24) + CEnum = reader.IsDBNull(21) ? (CEnum? )null : reader.GetString(21).ToCEnum(), + CCidr = reader.IsDBNull(22) ? (NpgsqlCidr? )null : reader.GetFieldValue(22), + CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), + CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), + CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) }; } } @@ -1774,5 +1780,131 @@ public async Task> GetAuthorsByBookName(GetAuthors } } } + + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } + }; + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(CreateExtendedBioSql)) + { + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateExtendedBioSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow + { + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } + }; + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) + { + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? (ExtendedBioType? 
)null : reader.GetString(2).ToExtendedBioType() + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetFirstExtendedBioByTypeSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetFirstExtendedBioByTypeRow + { + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? (ExtendedBioType? )null : reader.GetString(2).ToExtendedBioType() + }; + } + } + } + + return null; + } + + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncateExtendedBiosSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } } \ No newline at end of file diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index c91f471c..c67fd5c2 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ 
b/examples/NpgsqlLegacyExample/request.json @@ -295,6 +295,16 @@ "type": { "name": "uuid" } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } } ] }, @@ -625,6 +635,16 @@ } ] } + ], + "enums": [ + { + "name": "c_enum", + "vals": [ + "small", + "medium", + "big" + ] + } ] }, { @@ -32468,12 +32488,73 @@ ] } ] + }, + { + "name": "extended", + "tables": [ + { + "rel": { + "schema": "extended", + "name": "bios" + }, + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + } + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + } + } + ] + } + ], + "enums": [ + { + "name": "bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" + ] + } + ] } ] }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22,\n $23,\n $24::macaddr,\n $25::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n 
c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32815,6 +32896,16 @@ }, { "number": 22, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } + }, + { + "number": 23, "column": { "name": "c_cidr", "length": -1, @@ -32830,7 +32921,7 @@ } }, { - "number": 23, + "number": 24, "column": { "name": "c_inet", "length": -1, @@ -32846,7 +32937,7 @@ } }, { - "number": 24, + "number": 25, "column": { "name": "c_macaddr", "length": -1, @@ -32856,7 +32947,7 @@ } }, { - "number": 25, + "number": 26, "column": { "name": "c_macaddr8", "length": -1, @@ -33231,7 +33322,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33482,6 +33573,17 @@ }, "originalName": "c_uuid" }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": 
{ + "name": "c_enum" + }, + "originalName": "c_enum" + }, { "name": "c_cidr", "length": -1, @@ -35376,6 +35478,137 @@ } ], "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "author_name" + } + }, + { + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } + }, + { + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "author_name" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "name" + }, + { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "schema": "extended", + "name": "bio_type" + }, + "originalName": "bio_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + 
"table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index 491f45f0..b91b9065 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner╓ч public"кpublicс +./dist/LocalRunner╖ъ public"∙publicТ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -49,7 +49,8 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- -c_uuid0         Rpostgres_typesbuuidн +c_uuid0         Rpostgres_typesbuuid/ +c_enum0         Rpostgres_typesbc_enumн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ 
-88,7 +89,8 @@ pg_catalog timestamp name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext" pg_temp"ц▓ + description0         Rbooksbtext" +c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10209,8 +10211,15 @@ pg_cataloginformation_schemaviewsb  yes_or_noW pg_cataloginformation_schemaviewsb  yes_or_no] is_trigger_insertable_into0         R' -pg_cataloginformation_schemaviewsb  yes_or_no║ -М +pg_cataloginformation_schemaviewsb  yes_or_no"Пextended╘ +extendedbiosC + author_name0         Rextendedbiosb +pg_catalogvarchar< +name0         Rextendedbiosb +pg_catalogvarchar= +bio_type0         Rextendedbiosbextendedbio_type", +bio_type Autobiography BiographyMemoir· +й INSERT INTO postgres_types ( c_boolean, @@ -10234,6 +10243,7 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10261,10 +10271,11 @@ VALUES ( $19, $20, $21, - $22, + $22::c_enum, $23, - $24::macaddr, - $25::macaddr8 + $24, + $25::macaddr, + $26::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10291,10 +10302,11 @@ c_interval*PL c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! 
+ c_macaddr0         b macaddr*'# c_macaddr80         b macaddr82 Basic types : query.sqlBpostgres_typesЗ @@ -10376,8 +10388,8 @@ c_interval*NJ c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesЮ -┤SELECT + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу +└SELECT c_boolean, c_bit, c_smallint, @@ -10399,6 +10411,7 @@ c_interval*NJ c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -10445,7 +10458,8 @@ pg_catalogvarcharz c_varchar"^ pg_catalogvarcharzc_character_varying"; c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 c_text0         Rpostgres_typesbtextzc_text"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
@@ -10779,4 +10793,16 @@ WHERE books.name = $1GetAuthorsByBookName:many"- name0         R authorsbtextzname"( bio0         R authorsbtextzbio" books0         brbooks*.* -name0         Rbooksbtextzname: query.sql"v1.27.0*▌{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file +name0         Rbooksbtextzname: query.sqlь +KINSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)CreateExtendedBio:exec*SO + author_name0         Rextendedbiosbpg_catalog.varcharz author_name*EA +name0         Rextendedbiosbpg_catalog.varcharzname*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlBextendedbiosк +QSELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1GetFirstExtendedBioByType:one"P + author_name0         Rextendedbiosb +pg_catalogvarcharz author_name"B +name0         Rextendedbiosb +pg_catalogvarcharzname"G +bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF +bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: 
query.sql"v1.27.0*▌{"overrideDriverVersion":"","generateCsproj":true,"targetFramework":"netstandard2.0","namespaceName":"NpgsqlLegacyExampleGen","useDapper":false,"overrideDapperVersion":"","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"type":"int","notNull":false}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"type":"string","notNull":false}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"type":"DateTime","notNull":true}},{"column":"*:c_json_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_xml_string_override","csharp_type":{"type":"string","notNull":false}},{"column":"*:c_macaddr8","csharp_type":{"type":"string","notNull":false}}],"debugRequest":false} \ No newline at end of file diff --git a/examples/config/postgresql/authors/query.sql b/examples/config/postgresql/authors/query.sql index 32521d57..8fddebde 100644 --- a/examples/config/postgresql/authors/query.sql +++ b/examples/config/postgresql/authors/query.sql @@ -69,3 +69,12 @@ SELECT sqlc.embed(books) FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = $1; + +-- name: CreateExtendedBio :exec +INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3); + +-- name: GetFirstExtendedBioByType :one +SELECT * FROM extended.bios WHERE bio_type = $1 LIMIT 1; + +-- name: TruncateExtendedBios :exec +TRUNCATE TABLE extended.bios; \ No newline at end of file diff --git a/examples/config/postgresql/authors/schema.sql b/examples/config/postgresql/authors/schema.sql index 24dbe2b6..db1305ba 100644 --- a/examples/config/postgresql/authors/schema.sql +++ b/examples/config/postgresql/authors/schema.sql @@ -11,3 +11,14 @@ CREATE TABLE books ( description TEXT, FOREIGN KEY (author_id) REFERENCES authors (id) ON DELETE CASCADE ); + +CREATE SCHEMA extended; + +CREATE TYPE extended.bio_type AS ENUM ('Autobiography', 'Biography', 'Memoir'); + +CREATE TABLE extended.bios ( + author_name 
VARCHAR(100), + name VARCHAR(100), + bio_type extended.bio_type, + PRIMARY KEY (author_name, name) +); diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index f16fcb31..e418c76d 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -24,6 +24,7 @@ INSERT INTO postgres_types c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, @@ -51,6 +52,7 @@ VALUES ( sqlc.narg('c_bpchar'), sqlc.narg('c_text'), sqlc.narg('c_uuid'), + sqlc.narg('c_enum')::c_enum, sqlc.narg('c_cidr'), sqlc.narg('c_inet'), sqlc.narg('c_macaddr')::macaddr, @@ -133,6 +135,7 @@ SELECT c_bpchar, c_text, c_uuid, + c_enum, c_cidr, c_inet, c_macaddr, diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index 31b8b941..0cd1c9f1 100644 --- a/examples/config/postgresql/types/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -1,5 +1,7 @@ CREATE EXTENSION "uuid-ossp"; +CREATE TYPE c_enum AS ENUM ('small', 'medium', 'big'); + CREATE TABLE postgres_types ( /* Numeric Data Types */ c_boolean BOOLEAN, @@ -34,7 +36,8 @@ CREATE TABLE postgres_types ( c_macaddr8 MACADDR8, /* Special Data Types */ - c_uuid UUID + c_uuid UUID, + c_enum c_enum ); CREATE TABLE postgres_unstructured_types ( From 60b5a63a24600d213df84ac8242f4b3c75e30bbd Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 16 Aug 2025 17:15:29 +0200 Subject: [PATCH 20/33] fix: remove redundant using directives --- CodeGenerator/CodeGenerator.cs | 1 - CodeGenerator/Generators/ModelsGen.cs | 1 - CodegenTests/CodegenTypeOverrideTests.cs | 1 - Drivers/Generators/CommonGen.cs | 1 - Drivers/QueryAnnotations.cs | 1 - 5 files changed, 5 deletions(-) diff --git a/CodeGenerator/CodeGenerator.cs b/CodeGenerator/CodeGenerator.cs index 7cefc15d..d9b8c664 100644 --- a/CodeGenerator/CodeGenerator.cs +++ b/CodeGenerator/CodeGenerator.cs @@ -9,7 +9,6 @@ using System.Text; using System.Text.Json; using 
System.Threading.Tasks; -using Enum = Plugin.Enum; namespace SqlcGenCsharp; diff --git a/CodeGenerator/Generators/ModelsGen.cs b/CodeGenerator/Generators/ModelsGen.cs index 549738ab..47ca7132 100644 --- a/CodeGenerator/Generators/ModelsGen.cs +++ b/CodeGenerator/Generators/ModelsGen.cs @@ -2,7 +2,6 @@ using Plugin; using SqlcGenCsharp.Drivers; using System.Collections.Generic; -using System.Collections.Immutable; using System.Linq; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; diff --git a/CodegenTests/CodegenTypeOverrideTests.cs b/CodegenTests/CodegenTypeOverrideTests.cs index afe70395..9fd2ff65 100644 --- a/CodegenTests/CodegenTypeOverrideTests.cs +++ b/CodegenTests/CodegenTypeOverrideTests.cs @@ -2,7 +2,6 @@ using Plugin; using SqlcGenCsharp; using System.Text; -using System.Xml; namespace CodegenTests; diff --git a/Drivers/Generators/CommonGen.cs b/Drivers/Generators/CommonGen.cs index e295e0ed..6a1bc25f 100644 --- a/Drivers/Generators/CommonGen.cs +++ b/Drivers/Generators/CommonGen.cs @@ -1,5 +1,4 @@ using Plugin; -using System; using System.Collections.Generic; using System.Linq; diff --git a/Drivers/QueryAnnotations.cs b/Drivers/QueryAnnotations.cs index 65531a47..8975a544 100644 --- a/Drivers/QueryAnnotations.cs +++ b/Drivers/QueryAnnotations.cs @@ -1,6 +1,5 @@ using Microsoft.CodeAnalysis.CSharp.Syntax; using Plugin; -using System.Collections.Generic; namespace SqlcGenCsharp.Drivers; From c24685926600b5c6d9bc30c859d5ddbdc65c7be7 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 16 Aug 2025 22:07:42 +0200 Subject: [PATCH 21/33] fix: enum logic for mysql --- CodeGenerator/Generators/ModelsGen.cs | 3 +- CodegenTests/CodegenSchemaTests.cs | 4 +- Drivers/DbDriver.cs | 76 +++++++++--------- Drivers/MySqlConnectorDriver.cs | 77 ++++++++----------- Drivers/NpgsqlDriver.cs | 58 ++++++++++---- Drivers/SqliteDriver.cs | 9 ++- Extensions/StringExtensions.cs | 66 ++++++++-------- .../EndToEndScaffold/Templates/MySqlTests.cs | 10 +-- 
.../MySqlConnectorDapperTester.generated.cs | 12 +-- .../MySqlConnectorTester.generated.cs | 12 +-- .../MySqlConnectorDapperTester.generated.cs | 12 +-- .../MySqlConnectorTester.generated.cs | 12 +-- .../MySqlConnectorDapperExample/Models.cs | 68 ++++++++-------- .../MySqlConnectorDapperExample/QuerySql.cs | 10 +-- examples/MySqlConnectorDapperExample/Utils.cs | 12 +-- .../Models.cs | 68 ++++++++-------- .../QuerySql.cs | 10 +-- .../Utils.cs | 12 +-- examples/MySqlConnectorExample/Models.cs | 66 ++++++++-------- examples/MySqlConnectorExample/QuerySql.cs | 14 ++-- .../MySqlConnectorLegacyExample/Models.cs | 68 ++++++++-------- .../MySqlConnectorLegacyExample/QuerySql.cs | 18 ++--- 22 files changed, 349 insertions(+), 348 deletions(-) diff --git a/CodeGenerator/Generators/ModelsGen.cs b/CodeGenerator/Generators/ModelsGen.cs index 47ca7132..e13e5bf2 100644 --- a/CodeGenerator/Generators/ModelsGen.cs +++ b/CodeGenerator/Generators/ModelsGen.cs @@ -16,7 +16,6 @@ internal class ModelsGen(DbDriver dbDriver, string namespaceName) private DataClassesGen DataClassesGen { get; } = new(dbDriver); - private EnumsGen EnumsGen { get; } = new(dbDriver); public File GenerateFile( @@ -58,7 +57,7 @@ private MemberDeclarationSyntax[] GenerateEnums(Dictionary { - var enumName = e.Value.Name.ToModelName(s.Key, dbDriver.DefaultSchema); + var enumName = dbDriver.EnumToModelName(s.Key, e.Value); return EnumsGen.Generate(enumName, e.Value.Vals); }); }).ToArray(); diff --git a/CodegenTests/CodegenSchemaTests.cs b/CodegenTests/CodegenSchemaTests.cs index 7184843e..57254c05 100644 --- a/CodegenTests/CodegenSchemaTests.cs +++ b/CodegenTests/CodegenSchemaTests.cs @@ -47,8 +47,8 @@ public void TestSchemaScopedEnum() var expected = new HashSet { "DummySchemaDummyTable", - "DummySchemaDummyTableDummyColumn", - "DummySchemaDummyTableDummyColumnExtensions" + "DummyTableDummyColumn", + "DummyTableDummyColumnExtensions" }; var actual = GetMemberNames(modelsCode); 
Assert.That(actual.IsSupersetOf(expected)); diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index 2fccdb5c..ba8ba5bd 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -100,35 +100,45 @@ protected DbDriver( foreach (var schemaEnums in Enums) foreach (var e in schemaEnums.Value) - { NullableTypes.Add(e.Key.ToModelName(schemaEnums.Key, DefaultSchema)); - } if (!Options.DotnetFramework.IsDotnetCore()) return; foreach (var t in NullableTypesInDotnetCore) - { NullableTypes.Add(t); - } } - private readonly HashSet _excludedSchemas = + private static readonly HashSet _excludedSchemas = [ "pg_catalog", "information_schema" ]; - private Dictionary> ConstructTablesLookup(Catalog catalog) + private static Dictionary> ConstructTablesLookup(Catalog catalog) { return catalog.Schemas .Where(s => !_excludedSchemas.Contains(s.Name)) .ToDictionary( s => s.Name == catalog.DefaultSchema ? string.Empty : s.Name, - s => s.Tables.ToDictionary(t => t.Rel.Name, t => t)); + s => s.Tables.ToDictionary(t => t.Rel.Name, t => t) + ); } - protected abstract Dictionary> ConstructEnumsLookup(Catalog catalog); + private static Dictionary> ConstructEnumsLookup(Catalog catalog) + { + return catalog + .Schemas + .SelectMany(s => s.Enums.Select(e => new { EnumItem = e, Schema = s.Name })) + .GroupBy(x => x.Schema == catalog.DefaultSchema ? string.Empty : x.Schema) + .ToDictionary( + group => group.Key, + group => group.ToDictionary( + x => x.EnumItem.Name, + x => x.EnumItem + ) + ); + } public virtual ISet GetUsingDirectivesForQueries() { @@ -217,13 +227,6 @@ public static void ConfigureSqlMapper() """)!]; } - protected string GetColumnSchema(Column column) - { - if (column.Table == null) - return string.Empty; - return column.Table.Schema == DefaultSchema ? 
string.Empty : column.Table.Schema; - } - public abstract string TransformQueryText(Query query); public abstract ConnectionGenCommands EstablishConnection(Query query); @@ -342,8 +345,8 @@ protected string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) if (string.IsNullOrEmpty(column.Type.Name)) return "object"; - if (GetEnumType(column) is { } enumType) - return EnumToCsharpTypeName(column, enumType); + if (GetEnumType(column) is not null) + return EnumToCsharpDataType(column); if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) return csharpType.Type; @@ -354,8 +357,7 @@ protected string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) if (column.IsArray || column.IsSqlcSlice) return $"{columnMapping.Key}[]"; return columnMapping.Key; } - - throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); + throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name} in {GetType().Name}"); } private static bool DoesColumnMappingApply(ColumnMapping columnMapping, Column column) @@ -387,20 +389,20 @@ private string GetColumnReader(CsharpTypeOption csharpTypeOption, int ordinal) throw new NotSupportedException($"Could not find column mapping for type override: {csharpTypeOption.Type}"); } - private string GetEnumReader(Column column, int ordinal, Plugin.Enum enumType) + private string GetEnumReader(Column column, int ordinal) { - var enumName = column.Type.Name.ToModelName(column.Table.Schema, DefaultSchema); - var fullEnumType = EnumToCsharpTypeName(column, enumType); + var enumName = EnumToModelName(column); + var enumDataType = EnumToCsharpDataType(column); var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; - if (fullEnumType.StartsWith("HashSet")) - return $"{readStmt}.To{enumName}Set()"; - return $"{readStmt}.To{enumName}()"; + return enumDataType.StartsWith("HashSet") + ? 
$"{readStmt}.To{enumName}Set()" + : $"{readStmt}.To{enumName}()"; } public string GetColumnReader(Column column, int ordinal, Query? query) { - if (GetEnumType(column) is { } enumType) - return GetEnumReader(column, ordinal, enumType); + if (GetEnumType(column) is not null) + return GetEnumReader(column, ordinal); if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) return GetColumnReader(csharpType, ordinal); @@ -412,21 +414,15 @@ public string GetColumnReader(Column column, int ordinal, Query? query) return columnMapping.ReaderArrayFn?.Invoke(ordinal) ?? throw new InvalidOperationException("ReaderArrayFn is null"); return columnMapping.ReaderFn(ordinal); } - throw new NotSupportedException($"Column {column.Name} has unsupported column type: {column.Type.Name}"); + throw new NotSupportedException($"column {column.Name} has unsupported column type: {column.Type.Name} in {GetType().Name}"); } /* Enum methods*/ - protected Plugin.Enum? GetEnumType(Column column) - { - var schemaName = GetColumnSchema(column); - if (!Enums.TryGetValue(schemaName, value: out var enumsInSchema)) - return null; - var enumNameWithoutSchema = column.Type.Name.Replace($"{schemaName}.", ""); - return enumsInSchema.GetValueOrDefault(enumNameWithoutSchema); - } + protected abstract Plugin.Enum? 
GetEnumType(Column column); - protected virtual string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) - { - return column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); - } + protected abstract string EnumToCsharpDataType(Column column); + + public abstract string EnumToModelName(string schemaName, Plugin.Enum enumType); + + protected abstract string EnumToModelName(Column column); } \ No newline at end of file diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index c0348487..58336ef7 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -3,10 +3,10 @@ using SqlcGenCsharp.Drivers.Generators; using System; using System.Collections.Generic; -using System.IO; using System.Linq; using System.Text.RegularExpressions; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; +using Enum = Plugin.Enum; namespace SqlcGenCsharp.Drivers; @@ -248,7 +248,7 @@ protected override ISet GetConfigureSqlMappings() .Where(IsSetDataType) .Select(c => { - var enumName = c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema); + var enumName = EnumToModelName(c); return $"SqlMapper.AddTypeHandler(typeof(HashSet<{enumName}>), new {enumName}TypeHandler());"; }) .Distinct(); @@ -285,7 +285,7 @@ public override void SetValue(IDbDataParameter parameter, HashSet<{{x}}> value) var enumType = GetEnumType(c); return enumType is not null && IsSetDataType(c, enumType); }) - .Select(c => setTypeHandlerFunc(c.Type.Name.ToModelName(GetColumnSchema(c), DefaultSchema))) + .Select(c => setTypeHandlerFunc(EnumToModelName(c))) .Distinct() .Select(m => ParseMemberDeclaration(m)!) 
.ToArray(); @@ -309,7 +309,7 @@ public override MemberDeclarationSyntax[] GetMemberDeclarationsForUtils() { if (!IsSetDataType(p.Column)) continue; - var enumName = p.Column.Type.Name.ToModelName(GetColumnSchema(p.Column), DefaultSchema); + var enumName = EnumToModelName(p.Column); memberDeclarations = memberDeclarations.AddRangeExcludeNulls([ParseMemberDeclaration(SetCsvConverterFunc(enumName))!]); } } @@ -497,7 +497,7 @@ public string GetCopyFromImpl(Query query, string queryTextConstant) """; } - private readonly ISet BoolAndByteTypes = new HashSet + private readonly ISet _boolAndByteTypes = new HashSet { "bool", "byte", @@ -541,7 +541,7 @@ private ISet GetCsvNullConverters(Query query) { var csharpType = GetCsharpTypeWithoutNullableSuffix(p.Column, query); if ( - !BoolAndByteTypes.Contains(csharpType) && + !_boolAndByteTypes.Contains(csharpType) && !IsSetDataType(p.Column) && TypeExistsInQuery(csharpType, query)) { @@ -560,7 +560,7 @@ private ISet GetSetConverters(Query query) if (!IsSetDataType(p.Column)) continue; - var enumName = p.Column.Type.Name.ToModelName(GetColumnSchema(p.Column), DefaultSchema); + var enumName = EnumToModelName(p.Column); var csvWriterVar = Variable.CsvWriter.AsVarName(); converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"HashSet<{enumName}>", true)}>(new Utils.{enumName}CsvConverter());"); converters.Add($"{csvWriterVar}.Context.TypeConverterCache.AddConverter<{AddNullableSuffixIfNeeded($"HashSet<{enumName}>", false)}>(new Utils.{enumName}CsvConverter());"); @@ -569,13 +569,7 @@ private ISet GetSetConverters(Query query) } /* Enum methods */ - protected override string EnumToCsharpTypeName(Column column, Plugin.Enum enumType) - { - var enumName = column.Type.Name.ToModelName(GetColumnSchema(column), DefaultSchema); - return IsSetDataType(column, enumType) ? 
$"HashSet<{enumName}>" : enumName; - } - - private static bool IsSetDataType(Column column, Plugin.Enum enumType) + private static bool IsSetDataType(Column column, Enum enumType) { return column.Length > enumType.Vals.Select(v => v.Length).Sum(); } @@ -586,38 +580,6 @@ private bool IsSetDataType(Column column) return enumType is not null && IsSetDataType(column, enumType); } - protected override Dictionary> ConstructEnumsLookup(Catalog catalog) - { - var defaultSchemaCatalog = catalog.Schemas.First(s => s.Name == catalog.DefaultSchema); - return defaultSchemaCatalog.Enums - .Select(e => new - { - EnumItem = e, - Schema = FindEnumSchema(e) - }) - .GroupBy(x => x.Schema) - .ToDictionary( - group => group.Key, - group => group.ToDictionary( - x => x.EnumItem.Name, - x => x.EnumItem) - ); - } - - private string FindEnumSchema(Plugin.Enum e) - { - foreach (var schemaTables in Tables) - { - foreach (var table in schemaTables.Value) - { - var isEnumColumn = table.Value.Columns.Any(c => c.Type.Name == e.Name); - if (isEnumColumn) - return schemaTables.Key; - } - } - throw new InvalidDataException($"No enum {e.Name} schema found."); - } - public override Func? GetWriterFn(Column column, Query query) { var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); @@ -638,4 +600,27 @@ private string FindEnumSchema(Plugin.Enum e) static string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; return Options.UseDapper ? null : DefaultWriterFn; } + + protected override Enum? GetEnumType(Column column) + { + if (!Enums.TryGetValue(string.Empty, value: out var enumsInSchema)) + return null; + return enumsInSchema.GetValueOrDefault(column.Type.Name); + } + + protected override string EnumToCsharpDataType(Column column) + { + var enumName = EnumToModelName(column); + return IsSetDataType(column) ? 
$"HashSet<{enumName}>" : enumName; + } + + public override string EnumToModelName(string _, Enum enumType) + { + return enumType.Name.ToPascalCase(); + } + + protected override string EnumToModelName(Column column) + { + return column.Type.Name.ToPascalCase(); + } } \ No newline at end of file diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index f3592421..e207ee47 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -6,6 +6,7 @@ using System.Linq; using System.Text.RegularExpressions; using static Microsoft.CodeAnalysis.CSharp.SyntaxFactory; +using Enum = Plugin.Enum; namespace SqlcGenCsharp.Drivers; @@ -27,7 +28,7 @@ public NpgsqlDriver( } } - protected sealed override Dictionary ColumnMappings { get; } = + protected override Dictionary ColumnMappings { get; } = new() { /* Numeric data types */ @@ -532,21 +533,6 @@ string AddRowsToCopyCommand() } } - protected override Dictionary> ConstructEnumsLookup(Catalog catalog) - { - return catalog - .Schemas - .SelectMany(s => s.Enums.Select(e => new { EnumItem = e, Schema = s.Name })) - .GroupBy(x => x.Schema == catalog.DefaultSchema ? string.Empty : x.Schema) - .ToDictionary( - group => group.Key, - group => group.ToDictionary( - x => x.EnumItem.Name, - x => x.EnumItem - ) - ); - } - public override Func? GetWriterFn(Column column, Query query) { var csharpType = GetCsharpTypeWithoutNullableSuffix(column, query); @@ -554,7 +540,7 @@ string AddRowsToCopyCommand() if (writerFn is not null) return writerFn; - if (GetEnumType(column) is { } enumType) + if (GetEnumType(column) is not null) { return (el, notNull, isDapper) => { @@ -569,4 +555,42 @@ string AddRowsToCopyCommand() static string DefaultWriterFn(string el, bool notNull, bool isDapper) => notNull ? el : $"{el} ?? (object)DBNull.Value"; return Options.UseDapper ? 
null : DefaultWriterFn; } + + private static (string, string) GetEnumSchemaAndName(Column column) + { + var schemaName = column.Type.Schema; + var enumName = column.Type.Name; + if (schemaName == string.Empty && enumName.Contains('.')) + { + var schemaAndEnum = enumName.Split('.'); + schemaName = schemaAndEnum[0]; + enumName = schemaAndEnum[1]; + } + return (schemaName, enumName); + } + + protected override Enum? GetEnumType(Column column) + { + var (schemaName, enumName) = GetEnumSchemaAndName(column); + if (!Enums.TryGetValue(schemaName, value: out var enumsInSchema)) + return null; + return enumsInSchema.GetValueOrDefault(enumName); + } + + protected override string EnumToCsharpDataType(Column column) + { + var (schemaName, enumName) = GetEnumSchemaAndName(column); + return $"{schemaName}.{enumName}".ToPascalCase(); + } + + public override string EnumToModelName(string schemaName, Enum enumType) + { + return $"{schemaName}.{enumType.Name}".ToPascalCase(); + } + + protected override string EnumToModelName(Column column) + { + var (schemaName, enumName) = GetEnumSchemaAndName(column); + return $"{schemaName}.{enumName}".ToPascalCase(); + } } \ No newline at end of file diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index f2954994..2289c833 100644 --- a/Drivers/SqliteDriver.cs +++ b/Drivers/SqliteDriver.cs @@ -230,8 +230,9 @@ string AddParametersToCommand() } } - protected override Dictionary> ConstructEnumsLookup(Catalog catalog) - { - return []; - } + // Unsupported implementations of enum related logic - TODO refactor + protected override Enum? 
GetEnumType(Column column) => null; + protected override string EnumToCsharpDataType(Column column) => string.Empty; + public override string EnumToModelName(string schemaName, Enum enumType) => string.Empty; + protected override string EnumToModelName(Column column) => string.Empty; } \ No newline at end of file diff --git a/Extensions/StringExtensions.cs b/Extensions/StringExtensions.cs index 0aa0cd58..40ea9425 100644 --- a/Extensions/StringExtensions.cs +++ b/Extensions/StringExtensions.cs @@ -1,32 +1,42 @@ +using System.Globalization; +using System.Text; using System.Text.RegularExpressions; namespace SqlcGenCsharp; public static partial class StringExtensions { + [GeneratedRegex(@"[A-Za-z0-9]+")] + private static partial Regex WordRegex(); + + [GeneratedRegex(@"[^A-Za-z0-9]+")] + private static partial Regex NoWordRegex(); + public static string ToPascalCase(this string value) { - var invalidCharsRgx = InvalidCharsRegex(); - var whiteSpace = WhiteSpaceRegex(); - var startsWithLowerCaseChar = StartsWithLowerCaseCharRegex(); - var firstCharFollowedByUpperCasesOnly = FirstCharFollowedByUpperCasesOnlyRegex(); - var lowerCaseNextToNumber = LowerCaseNextToNumberRegex(); - var upperCaseInside = UpperCaseInsideRegex(); - - // replace white spaces with undescore, then replace all invalid chars with empty string - var pascalCase = invalidCharsRgx.Replace(whiteSpace.Replace(value, "_"), string.Empty) - // split by underscores - .Split(["_"], StringSplitOptions.RemoveEmptyEntries) - // set first letter to uppercase - .Select(w => startsWithLowerCaseChar.Replace(w, m => m.Value.ToUpper())) - // replace second and all following upper case letters to lower if there is no next lower (ABC -> Abc) - .Select(w => firstCharFollowedByUpperCasesOnly.Replace(w, m => m.Value.ToLower())) - // set upper case the first lower case following a number (Ab9cd -> Ab9Cd) - .Select(w => lowerCaseNextToNumber.Replace(w, m => m.Value.ToUpper())) - // lower second and next upper case letters 
except the last if it follows by any lower (ABcDEf -> AbcDef) - .Select(w => upperCaseInside.Replace(w, m => m.Value.ToLower())); - - return string.Concat(pascalCase); + if (string.IsNullOrWhiteSpace(value)) + return string.Empty; + string cleaned = NoWordRegex().Replace(value, " "); + + var sb = new StringBuilder(); + foreach (Match match in WordRegex().Matches(cleaned)) + { + var word = match.Value; + if (word.Length == 0) + continue; + + if (word.Length == 1) + { + sb.Append(CultureInfo.InvariantCulture.TextInfo.ToUpper(word)); + continue; + } + + sb.Append(char.ToUpperInvariant(word[0])); + if (word.Length > 1) + sb.Append(word[1..]); + } + + return sb.ToString(); } public static string ToCamelCase(this string value) @@ -38,7 +48,6 @@ public static string ToCamelCase(this string value) public static string ToModelName(this string value, string schema, string defaultSchema) { var schemaName = schema == defaultSchema ? string.Empty : schema; - value = value.Replace($"{schemaName}.", ""); return $"{schemaName}_{value.TrimEnd('s')}".ToPascalCase(); // TODO implement better way to turn words to singular } @@ -46,17 +55,4 @@ public static string AppendSemicolonUnlessEmpty(this string input) { return input == string.Empty ? 
"" : $"{input};"; } - - [GeneratedRegex("[^_a-zA-Z0-9]")] - private static partial Regex InvalidCharsRegex(); - [GeneratedRegex(@"(?<=\s)")] - private static partial Regex WhiteSpaceRegex(); - [GeneratedRegex("^[a-z]")] - private static partial Regex StartsWithLowerCaseCharRegex(); - [GeneratedRegex("(?<=[A-Z])[A-Z0-9]+$")] - private static partial Regex FirstCharFollowedByUpperCasesOnlyRegex(); - [GeneratedRegex("(?<=[0-9])[a-z]")] - private static partial Regex LowerCaseNextToNumberRegex(); - [GeneratedRegex("(?<=[A-Z])[A-Z]+?((?=[A-Z][a-z])|(?=[0-9]))")] - private static partial Regex UpperCaseInsideRegex(); } \ No newline at end of file diff --git a/end2end/EndToEndScaffold/Templates/MySqlTests.cs b/end2end/EndToEndScaffold/Templates/MySqlTests.cs index 8039df79..d5745d34 100644 --- a/end2end/EndToEndScaffold/Templates/MySqlTests.cs +++ b/end2end/EndToEndScaffold/Templates/MySqlTests.cs @@ -724,20 +724,20 @@ await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = {{Consts.BojackAuthor}}, Name = {{Consts.BojackBookTitle}}, - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = {{Consts.BojackAuthor}}, Name = {{Consts.BojackBookTitle}}, - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }; var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { - BioType = ExtendedBiosBioType.Memoir + BioType = BiosBioType.Memoir }); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); diff --git 
a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index a52670d2..a4a8c48d 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -574,19 +574,19 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { - ExtendedBiosAuthorType.Author, - ExtendedBiosAuthorType.Translator + BiosAuthorType.Author, + BiosAuthorType.Translator } }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) { diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index 4ed009c2..ebe04e3a 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs 
+++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -574,19 +574,19 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { - ExtendedBiosAuthorType.Author, - ExtendedBiosAuthorType.Translator + BiosAuthorType.Author, + BiosAuthorType.Translator } }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); AssertSingularEquals(expected, actual.Value); void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) { diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs index 687a2ea0..c49c6496 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -574,19 +574,19 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public 
async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { - ExtendedBiosAuthorType.Author, - ExtendedBiosAuthorType.Translator + BiosAuthorType.Author, + BiosAuthorType.Translator } }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) { diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index 4afc4535..54ba9842 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -574,19 +574,19 @@ void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunc [Test] public async Task TestMySqlScopedSchemaEnum() { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = 
ExtendedBiosBioType.Memoir, AuthorType = new HashSet { ExtendedBiosAuthorType.Author, ExtendedBiosAuthorType.Translator } }); + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); var expected = new QuerySql.GetFirstExtendedBioByTypeRow { AuthorName = "Bojack Horseman", Name = "One Trick Pony", - BioType = ExtendedBiosBioType.Memoir, - AuthorType = new HashSet + BioType = BiosBioType.Memoir, + AuthorType = new HashSet { - ExtendedBiosAuthorType.Author, - ExtendedBiosAuthorType.Translator + BiosAuthorType.Author, + BiosAuthorType.Translator } }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = ExtendedBiosBioType.Memoir }); + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); AssertSingularEquals(expected, actual); void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) { diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index fb0ee0ab..e3abdf1c 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -73,8 +73,8 @@ public class ExtendedBio { public string? AuthorName { get; init; } public string? Name { get; init; } - public ExtendedBiosBioType? BioType { get; init; } - public HashSet? AuthorType { get; init; } + public BiosBioType? BioType { get; init; } + public HashSet? 
AuthorType { get; init; } }; public enum MysqlStringTypesCEnum { @@ -156,7 +156,7 @@ public static HashSet ToMysqlStringTypesCSetSet(this strin } } -public enum ExtendedBiosBioType +public enum BiosBioType { Invalid = 0, // reserved for invalid enum value Autobiography = 1, @@ -164,39 +164,39 @@ public enum ExtendedBiosBioType Memoir = 3 } -public static class ExtendedBiosBioTypeExtensions +public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosBioType.Invalid, - ["Autobiography"] = ExtendedBiosBioType.Autobiography, - ["Biography"] = ExtendedBiosBioType.Biography, - ["Memoir"] = ExtendedBiosBioType.Memoir + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosBioType.Invalid] = string.Empty, - [ExtendedBiosBioType.Autobiography] = "Autobiography", - [ExtendedBiosBioType.Biography] = "Biography", - [ExtendedBiosBioType.Memoir] = "Memoir" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosBioType me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosBioTypeSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new 
HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum ExtendedBiosAuthorType +public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value Author = 1, @@ -204,34 +204,34 @@ public enum ExtendedBiosAuthorType Translator = 3 } -public static class ExtendedBiosAuthorTypeExtensions +public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosAuthorType.Invalid, - ["Author"] = ExtendedBiosAuthorType.Author, - ["Editor"] = ExtendedBiosAuthorType.Editor, - ["Translator"] = ExtendedBiosAuthorType.Translator + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosAuthorType.Invalid] = string.Empty, - [ExtendedBiosAuthorType.Author] = "Author", - [ExtendedBiosAuthorType.Editor] = "Editor", - [ExtendedBiosAuthorType.Translator] = "Translator" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosAuthorType me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosAuthorTypeSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at 
end of file diff --git a/examples/MySqlConnectorDapperExample/QuerySql.cs b/examples/MySqlConnectorDapperExample/QuerySql.cs index 7bd88a5a..1f610152 100644 --- a/examples/MySqlConnectorDapperExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperExample/QuerySql.cs @@ -494,8 +494,8 @@ public class CreateExtendedBioArgs { public string? AuthorName { get; init; } public string? Name { get; init; } - public ExtendedBiosBioType? BioType { get; init; } - public HashSet? AuthorType { get; init; } + public BiosBioType? BioType { get; init; } + public HashSet? AuthorType { get; init; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -521,12 +521,12 @@ public class GetFirstExtendedBioByTypeRow { public string? AuthorName { get; init; } public string? Name { get; init; } - public ExtendedBiosBioType? BioType { get; init; } - public HashSet? AuthorType { get; init; } + public BiosBioType? BioType { get; init; } + public HashSet? AuthorType { get; init; } }; public class GetFirstExtendedBioByTypeArgs { - public ExtendedBiosBioType? BioType { get; init; } + public BiosBioType? 
BioType { get; init; } }; public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { diff --git a/examples/MySqlConnectorDapperExample/Utils.cs b/examples/MySqlConnectorDapperExample/Utils.cs index 3486b103..f8bb27fe 100644 --- a/examples/MySqlConnectorDapperExample/Utils.cs +++ b/examples/MySqlConnectorDapperExample/Utils.cs @@ -29,7 +29,7 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new BiosAuthorTypeTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); } @@ -39,16 +39,16 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class ExtendedBiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> + private class BiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToExtendedBiosAuthorTypeSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } diff --git a/examples/MySqlConnectorDapperLegacyExample/Models.cs b/examples/MySqlConnectorDapperLegacyExample/Models.cs index fffbc9ad..d47b0ec7 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Models.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Models.cs @@ -74,8 +74,8 @@ public 
class ExtendedBio { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; public enum MysqlStringTypesCEnum { @@ -157,7 +157,7 @@ public static HashSet ToMysqlStringTypesCSetSet(this strin } } - public enum ExtendedBiosBioType + public enum BiosBioType { Invalid = 0, // reserved for invalid enum value Autobiography = 1, @@ -165,39 +165,39 @@ public enum ExtendedBiosBioType Memoir = 3 } - public static class ExtendedBiosBioTypeExtensions + public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosBioType.Invalid, - ["Autobiography"] = ExtendedBiosBioType.Autobiography, - ["Biography"] = ExtendedBiosBioType.Biography, - ["Memoir"] = ExtendedBiosBioType.Memoir + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosBioType.Invalid] = string.Empty, - [ExtendedBiosBioType.Autobiography] = "Autobiography", - [ExtendedBiosBioType.Biography] = "Biography", - [ExtendedBiosBioType.Memoir] = "Memoir" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosBioType me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } 
- public static HashSet ToExtendedBiosBioTypeSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum ExtendedBiosAuthorType + public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value Author = 1, @@ -205,35 +205,35 @@ public enum ExtendedBiosAuthorType Translator = 3 } - public static class ExtendedBiosAuthorTypeExtensions + public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosAuthorType.Invalid, - ["Author"] = ExtendedBiosAuthorType.Author, - ["Editor"] = ExtendedBiosAuthorType.Editor, - ["Translator"] = ExtendedBiosAuthorType.Translator + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosAuthorType.Invalid] = string.Empty, - [ExtendedBiosAuthorType.Author] = "Author", - [ExtendedBiosAuthorType.Editor] = "Editor", - [ExtendedBiosAuthorType.Translator] = "Translator" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosAuthorType me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosAuthorTypeSet(this string me) + public static 
HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 601ceed2..45d502b5 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -1218,8 +1218,8 @@ public class CreateExtendedBioArgs { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -1245,12 +1245,12 @@ public class GetFirstExtendedBioByTypeRow { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; public class GetFirstExtendedBioByTypeArgs { - public ExtendedBiosBioType? BioType { get; set; } + public BiosBioType? 
BioType { get; set; } }; public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { diff --git a/examples/MySqlConnectorDapperLegacyExample/Utils.cs b/examples/MySqlConnectorDapperLegacyExample/Utils.cs index 88c13183..f377d68b 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Utils.cs +++ b/examples/MySqlConnectorDapperLegacyExample/Utils.cs @@ -31,7 +31,7 @@ public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new ExtendedBiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new BiosAuthorTypeTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -55,16 +55,16 @@ public override void SetValue(IDbDataParameter parameter, HashSet> + private class BiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToExtendedBiosAuthorTypeSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } diff --git a/examples/MySqlConnectorExample/Models.cs b/examples/MySqlConnectorExample/Models.cs index 6c575cd9..88824602 100644 --- a/examples/MySqlConnectorExample/Models.cs +++ b/examples/MySqlConnectorExample/Models.cs @@ -11,7 +11,7 @@ namespace MySqlConnectorExampleGen; public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? 
CLongblob); public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); -public readonly record struct ExtendedBio(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); +public readonly record struct ExtendedBio(string? AuthorName, string? Name, BiosBioType? BioType, HashSet? AuthorType); public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value @@ -92,7 +92,7 @@ public static HashSet ToMysqlStringTypesCSetSet(this strin } } -public enum ExtendedBiosBioType +public enum BiosBioType { Invalid = 0, // reserved for invalid enum value Autobiography = 1, @@ -100,39 +100,39 @@ public enum ExtendedBiosBioType Memoir = 3 } -public static class ExtendedBiosBioTypeExtensions +public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosBioType.Invalid, - ["Autobiography"] = ExtendedBiosBioType.Autobiography, - ["Biography"] = ExtendedBiosBioType.Biography, - ["Memoir"] = ExtendedBiosBioType.Memoir + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosBioType.Invalid] = string.Empty, - [ExtendedBiosBioType.Autobiography] = "Autobiography", - [ExtendedBiosBioType.Biography] = "Biography", - [ExtendedBiosBioType.Memoir] = "Memoir" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) + public static BiosBioType 
ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosBioType me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosBioTypeSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum ExtendedBiosAuthorType +public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value Author = 1, @@ -140,34 +140,34 @@ public enum ExtendedBiosAuthorType Translator = 3 } -public static class ExtendedBiosAuthorTypeExtensions +public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosAuthorType.Invalid, - ["Author"] = ExtendedBiosAuthorType.Author, - ["Editor"] = ExtendedBiosAuthorType.Editor, - ["Translator"] = ExtendedBiosAuthorType.Translator + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosAuthorType.Invalid] = string.Empty, - [ExtendedBiosAuthorType.Author] = "Author", - [ExtendedBiosAuthorType.Editor] = "Editor", - [ExtendedBiosAuthorType.Translator] = "Translator" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string 
Stringify(this ExtendedBiosAuthorType me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosAuthorTypeSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/QuerySql.cs b/examples/MySqlConnectorExample/QuerySql.cs index b7e7e85d..0caaf50b 100644 --- a/examples/MySqlConnectorExample/QuerySql.cs +++ b/examples/MySqlConnectorExample/QuerySql.cs @@ -631,7 +631,7 @@ public async Task> GetAuthorsByBookName(GetAuthors } private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); + public readonly record struct CreateExtendedBioArgs(string? AuthorName, string? Name, BiosBioType? BioType, HashSet? AuthorType); public async Task CreateExtendedBio(CreateExtendedBioArgs args) { if (this.Transaction == null) @@ -667,8 +667,8 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) } private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, ExtendedBiosBioType? BioType, HashSet? AuthorType); - public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBiosBioType? BioType); + public readonly record struct GetFirstExtendedBioByTypeRow(string? AuthorName, string? Name, BiosBioType? BioType, HashSet? AuthorType); + public readonly record struct GetFirstExtendedBioByTypeArgs(BiosBioType? 
BioType); public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { if (this.Transaction == null) @@ -687,8 +687,8 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) { AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } @@ -713,8 +713,8 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) { AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } diff --git a/examples/MySqlConnectorLegacyExample/Models.cs b/examples/MySqlConnectorLegacyExample/Models.cs index 51861f59..5df57baa 100644 --- a/examples/MySqlConnectorLegacyExample/Models.cs +++ b/examples/MySqlConnectorLegacyExample/Models.cs @@ -74,8 +74,8 @@ public class ExtendedBio { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? 
BioType { get; set; } + public HashSet AuthorType { get; set; } }; public enum MysqlStringTypesCEnum { @@ -157,7 +157,7 @@ public static HashSet ToMysqlStringTypesCSetSet(this strin } } - public enum ExtendedBiosBioType + public enum BiosBioType { Invalid = 0, // reserved for invalid enum value Autobiography = 1, @@ -165,39 +165,39 @@ public enum ExtendedBiosBioType Memoir = 3 } - public static class ExtendedBiosBioTypeExtensions + public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosBioType.Invalid, - ["Autobiography"] = ExtendedBiosBioType.Autobiography, - ["Biography"] = ExtendedBiosBioType.Biography, - ["Memoir"] = ExtendedBiosBioType.Memoir + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosBioType.Invalid] = string.Empty, - [ExtendedBiosBioType.Autobiography] = "Autobiography", - [ExtendedBiosBioType.Biography] = "Biography", - [ExtendedBiosBioType.Memoir] = "Memoir" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static ExtendedBiosBioType ToExtendedBiosBioType(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosBioType me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosBioTypeSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return 
new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum ExtendedBiosAuthorType + public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value Author = 1, @@ -205,35 +205,35 @@ public enum ExtendedBiosAuthorType Translator = 3 } - public static class ExtendedBiosAuthorTypeExtensions + public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = ExtendedBiosAuthorType.Invalid, - ["Author"] = ExtendedBiosAuthorType.Author, - ["Editor"] = ExtendedBiosAuthorType.Editor, - ["Translator"] = ExtendedBiosAuthorType.Translator + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [ExtendedBiosAuthorType.Invalid] = string.Empty, - [ExtendedBiosAuthorType.Author] = "Author", - [ExtendedBiosAuthorType.Editor] = "Editor", - [ExtendedBiosAuthorType.Translator] = "Translator" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static ExtendedBiosAuthorType ToExtendedBiosAuthorType(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this ExtendedBiosAuthorType me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToExtendedBiosAuthorTypeSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No 
newline at end of file diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index b5834c5d..09d0d17d 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -1998,8 +1998,8 @@ public class CreateExtendedBioArgs { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; public async Task CreateExtendedBio(CreateExtendedBioArgs args) { @@ -2040,12 +2040,12 @@ public class GetFirstExtendedBioByTypeRow { public string AuthorName { get; set; } public string Name { get; set; } - public ExtendedBiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; public class GetFirstExtendedBioByTypeArgs { - public ExtendedBiosBioType? BioType { get; set; } + public BiosBioType? BioType { get; set; } }; public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { @@ -2065,8 +2065,8 @@ public async Task GetFirstExtendedBioByType(GetFir { AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? (ExtendedBiosBioType? )null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } @@ -2091,8 +2091,8 @@ public async Task GetFirstExtendedBioByType(GetFir { AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), Name = reader.IsDBNull(1) ? 
null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? (ExtendedBiosBioType? )null : reader.GetString(2).ToExtendedBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToExtendedBiosAuthorTypeSet() + BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } From b1cc0124ef0f7231c29f5d57269bef8c60d0f7df Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 16 Aug 2025 22:26:55 +0200 Subject: [PATCH 22/33] fix: move enum logic to be behind an abstract class --- CodeGenerator/Generators/ModelsGen.cs | 8 +++++-- Drivers/DbDriver.cs | 29 ++----------------------- Drivers/EnumDbDriver.cs | 31 +++++++++++++++++++++++++++ Drivers/MySqlConnectorDriver.cs | 12 ++++++++++- Drivers/NpgsqlDriver.cs | 12 ++++++++++- Drivers/SqliteDriver.cs | 6 ------ 6 files changed, 61 insertions(+), 37 deletions(-) create mode 100644 Drivers/EnumDbDriver.cs diff --git a/CodeGenerator/Generators/ModelsGen.cs b/CodeGenerator/Generators/ModelsGen.cs index e13e5bf2..f536bd10 100644 --- a/CodeGenerator/Generators/ModelsGen.cs +++ b/CodeGenerator/Generators/ModelsGen.cs @@ -57,8 +57,12 @@ private MemberDeclarationSyntax[] GenerateEnums(Dictionary { - var enumName = dbDriver.EnumToModelName(s.Key, e.Value); - return EnumsGen.Generate(enumName, e.Value.Vals); + if (dbDriver is EnumDbDriver enumDbDriver) + { + var enumName = enumDbDriver.EnumToModelName(s.Key, e.Value); + return EnumsGen.Generate(enumName, e.Value.Vals); + } + return []; }); }).ToArray(); } diff --git a/Drivers/DbDriver.cs b/Drivers/DbDriver.cs index ba8ba5bd..1b95baa3 100644 --- a/Drivers/DbDriver.cs +++ b/Drivers/DbDriver.cs @@ -337,7 +337,7 @@ public bool IsTypeNullable(string csharpType) return Options.DotnetFramework.IsDotnetCore(); // non-primitives in .Net Core are inherently nullable } - protected string GetCsharpTypeWithoutNullableSuffix(Column column, Query? 
query) + protected virtual string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) { if (column.EmbedTable != null) return column.EmbedTable.Name.ToModelName(column.EmbedTable.Schema, DefaultSchema); @@ -345,9 +345,6 @@ protected string GetCsharpTypeWithoutNullableSuffix(Column column, Query? query) if (string.IsNullOrEmpty(column.Type.Name)) return "object"; - if (GetEnumType(column) is not null) - return EnumToCsharpDataType(column); - if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) return csharpType.Type; @@ -389,21 +386,8 @@ private string GetColumnReader(CsharpTypeOption csharpTypeOption, int ordinal) throw new NotSupportedException($"Could not find column mapping for type override: {csharpTypeOption.Type}"); } - private string GetEnumReader(Column column, int ordinal) + public virtual string GetColumnReader(Column column, int ordinal, Query? query) { - var enumName = EnumToModelName(column); - var enumDataType = EnumToCsharpDataType(column); - var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; - return enumDataType.StartsWith("HashSet") - ? $"{readStmt}.To{enumName}Set()" - : $"{readStmt}.To{enumName}()"; - } - - public string GetColumnReader(Column column, int ordinal, Query? query) - { - if (GetEnumType(column) is not null) - return GetEnumReader(column, ordinal); - if (FindOverrideForQueryColumn(query, column) is { CsharpType: var csharpType }) return GetColumnReader(csharpType, ordinal); @@ -416,13 +400,4 @@ public string GetColumnReader(Column column, int ordinal, Query? query) } throw new NotSupportedException($"column {column.Name} has unsupported column type: {column.Type.Name} in {GetType().Name}"); } - - /* Enum methods*/ - protected abstract Plugin.Enum? 
GetEnumType(Column column); - - protected abstract string EnumToCsharpDataType(Column column); - - public abstract string EnumToModelName(string schemaName, Plugin.Enum enumType); - - protected abstract string EnumToModelName(Column column); } \ No newline at end of file diff --git a/Drivers/EnumDbDriver.cs b/Drivers/EnumDbDriver.cs new file mode 100644 index 00000000..ef857c91 --- /dev/null +++ b/Drivers/EnumDbDriver.cs @@ -0,0 +1,31 @@ +using Plugin; +using SqlcGenCsharp; +using SqlcGenCsharp.Drivers; +using System.Collections.Generic; + +public abstract class EnumDbDriver(Options options, Catalog catalog, IList queries) : DbDriver(options, catalog, queries) +{ + protected abstract Enum? GetEnumType(Column column); + + protected abstract string EnumToCsharpDataType(Column column); + + public abstract string EnumToModelName(string schemaName, Enum enumType); + + protected abstract string EnumToModelName(Column column); + + protected abstract string GetEnumReader(Column column, int ordinal); + + public override string GetColumnReader(Column column, int ordinal, Query? query) + { + if (GetEnumType(column) is not null) + return GetEnumReader(column, ordinal); + return base.GetColumnReader(column, ordinal, query); + } + + protected override string GetCsharpTypeWithoutNullableSuffix(Column column, Query? 
query) + { + if (GetEnumType(column) is not null) + return EnumToCsharpDataType(column); + return base.GetCsharpTypeWithoutNullableSuffix(column, query); + } +} \ No newline at end of file diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 58336ef7..094311bb 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -14,7 +14,7 @@ public sealed partial class MySqlConnectorDriver( Options options, Catalog catalog, IList queries) : - DbDriver(options, catalog, queries), + EnumDbDriver(options, catalog, queries), IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { protected override Dictionary ColumnMappings { get; } = @@ -601,6 +601,16 @@ private bool IsSetDataType(Column column) return Options.UseDapper ? null : DefaultWriterFn; } + protected override string GetEnumReader(Column column, int ordinal) + { + var enumName = EnumToModelName(column); + var enumDataType = EnumToCsharpDataType(column); + var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; + return enumDataType.StartsWith("HashSet") + ? $"{readStmt}.To{enumName}Set()" + : $"{readStmt}.To{enumName}()"; + } + protected override Enum? 
GetEnumType(Column column) { if (!Enums.TryGetValue(string.Empty, value: out var enumsInSchema)) diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index e207ee47..4cabb8b7 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -10,7 +10,7 @@ namespace SqlcGenCsharp.Drivers; -public sealed class NpgsqlDriver : DbDriver, IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom +public sealed class NpgsqlDriver : EnumDbDriver, IOne, IMany, IExec, IExecRows, IExecLastId, ICopyFrom { public NpgsqlDriver( Options options, @@ -569,6 +569,16 @@ private static (string, string) GetEnumSchemaAndName(Column column) return (schemaName, enumName); } + protected override string GetEnumReader(Column column, int ordinal) + { + var enumName = EnumToModelName(column); + var enumDataType = EnumToCsharpDataType(column); + var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; + return enumDataType.StartsWith("HashSet") + ? $"{readStmt}.To{enumName}Set()" + : $"{readStmt}.To{enumName}()"; + } + protected override Enum? GetEnumType(Column column) { var (schemaName, enumName) = GetEnumSchemaAndName(column); diff --git a/Drivers/SqliteDriver.cs b/Drivers/SqliteDriver.cs index 2289c833..9997a8e1 100644 --- a/Drivers/SqliteDriver.cs +++ b/Drivers/SqliteDriver.cs @@ -229,10 +229,4 @@ string AddParametersToCommand() """; } } - - // Unsupported implementations of enum related logic - TODO refactor - protected override Enum? 
GetEnumType(Column column) => null; - protected override string EnumToCsharpDataType(Column column) => string.Empty; - public override string EnumToModelName(string schemaName, Enum enumType) => string.Empty; - protected override string EnumToModelName(Column column) => string.Empty; } \ No newline at end of file From 8ec132ea6f5a5a8875eeac5a48d7adc24d768454 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 16 Aug 2025 22:34:44 +0200 Subject: [PATCH 23/33] fix: postgres EnumToCsharpDataType --- Drivers/MySqlConnectorDriver.cs | 3 +-- Drivers/NpgsqlDriver.cs | 5 +---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/Drivers/MySqlConnectorDriver.cs b/Drivers/MySqlConnectorDriver.cs index 094311bb..f43f2574 100644 --- a/Drivers/MySqlConnectorDriver.cs +++ b/Drivers/MySqlConnectorDriver.cs @@ -604,9 +604,8 @@ private bool IsSetDataType(Column column) protected override string GetEnumReader(Column column, int ordinal) { var enumName = EnumToModelName(column); - var enumDataType = EnumToCsharpDataType(column); var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; - return enumDataType.StartsWith("HashSet") + return IsSetDataType(column) ? $"{readStmt}.To{enumName}Set()" : $"{readStmt}.To{enumName}()"; } diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index 4cabb8b7..8e7b1f01 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -572,11 +572,8 @@ private static (string, string) GetEnumSchemaAndName(Column column) protected override string GetEnumReader(Column column, int ordinal) { var enumName = EnumToModelName(column); - var enumDataType = EnumToCsharpDataType(column); var readStmt = $"{Variable.Reader.AsVarName()}.GetString({ordinal})"; - return enumDataType.StartsWith("HashSet") - ? $"{readStmt}.To{enumName}Set()" - : $"{readStmt}.To{enumName}()"; + return $"{readStmt}.To{enumName}()"; } protected override Enum? 
GetEnumType(Column column) From de59496846ef2c6cefa242b62399e16c6aac4fa6 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Tue, 19 Aug 2025 22:02:44 +0200 Subject: [PATCH 24/33] fix: separate string data types to their own table --- .../Templates/PostgresTests.cs | 23 +- end2end/EndToEndTests/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 21 +- end2end/EndToEndTests/NpgsqlTester.cs | 1 + .../EndToEndTests/NpgsqlTester.generated.cs | 21 +- .../EndToEndTestsLegacy/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 21 +- end2end/EndToEndTestsLegacy/NpgsqlTester.cs | 1 + .../NpgsqlTester.generated.cs | 21 +- examples/NpgsqlDapperExample/Models.cs | 13 +- examples/NpgsqlDapperExample/QuerySql.cs | 168 ++++- examples/NpgsqlDapperExample/request.json | 688 ++++++++++-------- examples/NpgsqlDapperExample/request.message | 203 +++--- examples/NpgsqlDapperLegacyExample/Models.cs | 13 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 168 ++++- .../NpgsqlDapperLegacyExample/request.json | 688 ++++++++++-------- .../NpgsqlDapperLegacyExample/request.message | 203 +++--- examples/NpgsqlExample/Models.cs | 3 +- examples/NpgsqlExample/QuerySql.cs | 298 ++++++-- examples/NpgsqlExample/request.json | 688 ++++++++++-------- examples/NpgsqlExample/request.message | 203 +++--- examples/NpgsqlLegacyExample/Models.cs | 13 +- examples/NpgsqlLegacyExample/QuerySql.cs | 339 ++++++--- examples/NpgsqlLegacyExample/request.json | 688 ++++++++++-------- examples/NpgsqlLegacyExample/request.message | 203 +++--- examples/config/postgresql/types/query.sql | 86 ++- examples/config/postgresql/types/schema.sql | 15 +- 27 files changed, 2803 insertions(+), 1988 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 986c1e8b..993ff402 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -19,7 +19,7 @@ public async Task 
TestPostgresStringTypes( string cBpchar, string cText) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CChar = cChar, CVarchar = cVarchar, @@ -28,7 +28,7 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CText = cText, }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresStringTypesRow { CChar = cChar, CVarchar = cVarchar, @@ -37,10 +37,10 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CText = cText, }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresStringTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPostgresStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); @@ -281,7 +281,7 @@ public async Task TestStringCopyFrom( string cText) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresStringTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, @@ -290,8 +290,8 @@ public async Task TestStringCopyFrom( CText = cText }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + await QuerySql.InsertPostgresStringTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -300,10 +300,10 @@ public async Task TestStringCopyFrom( CBpchar = cBpchar, CText = cText }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresStringTypesCnt(); AssertSingularEquals(expected, 
actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetPostgresStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); @@ -871,9 +871,12 @@ public async Task TestPostgresDataTypesOverride( await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, - CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs + { + CVarchar = cVarchar + }); var expected = new QuerySql.GetPostgresFunctionsRow { diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.cs b/end2end/EndToEndTests/NpgsqlDapperTester.cs index 7dfc2e07..9d1a8078 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index e64cb342..4213093b 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -319,8 +319,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null)] public async Task TestPostgresStringTypes(string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, 
CText = cText, }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); + var expected = new QuerySql.GetPostgresStringTypesRow { CChar = cChar, CVarchar = cVarchar, @@ -328,9 +328,9 @@ public async Task TestPostgresStringTypes(string cChar, string cVarchar, string CBpchar = cBpchar, CText = cText, }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPostgresStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); @@ -460,7 +460,8 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { MaxInteger = cInteger, @@ -508,9 +509,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(10, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresStringTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); + await QuerySql.InsertPostgresStringTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -519,9 +520,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarcha CBpchar = cBpchar, CText = cText }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, 
QuerySql.GetPostgresStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); diff --git a/end2end/EndToEndTests/NpgsqlTester.cs b/end2end/EndToEndTests/NpgsqlTester.cs index 6105a2dc..35340c87 100644 --- a/end2end/EndToEndTests/NpgsqlTester.cs +++ b/end2end/EndToEndTests/NpgsqlTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTables() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 2b624b9b..53246f34 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -319,8 +319,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null)] public async Task TestPostgresStringTypes(string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); + var expected = new QuerySql.GetPostgresStringTypesRow { CChar = cChar, CVarchar = cVarchar, @@ -328,9 +328,9 @@ public async Task TestPostgresStringTypes(string cChar, string cVarchar, string CBpchar = cBpchar, CText = cText, }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresStringTypes(); AssertSingularEquals(expected, 
actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPostgresStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); @@ -460,7 +460,8 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { MaxInteger = cInteger, @@ -508,9 +509,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(10, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresStringTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); + await QuerySql.InsertPostgresStringTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresStringTypesCntRow { Cnt = 
batchSize, CChar = cChar, @@ -519,9 +520,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarcha CBpchar = cBpchar, CText = cText }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresStringTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetPostgresStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs index 0dab21a9..05ed0055 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index f554ab29..465e2cb4 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -319,8 +319,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null)] public async Task TestPostgresStringTypes(string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); - var expected = new 
QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); + var expected = new QuerySql.GetPostgresStringTypesRow { CChar = cChar, CVarchar = cVarchar, @@ -328,9 +328,9 @@ public async Task TestPostgresStringTypes(string cChar, string cVarchar, string CBpchar = cBpchar, CText = cText, }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPostgresStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); @@ -460,7 +460,8 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { MaxInteger = cInteger, @@ -508,9 +509,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(10, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresStringTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); + await QuerySql.InsertPostgresStringTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -519,9 +520,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarcha CBpchar = cBpchar, CText = cText }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, 
QuerySql.GetPostgresStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs index cc33e3ec..6bd03fbd 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 3232c115..45eec99d 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -319,8 +319,8 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth [TestCase(null, null, null, null, null)] public async Task TestPostgresStringTypes(string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText, }); + var expected = new QuerySql.GetPostgresStringTypesRow { CChar = cChar, CVarchar = cVarchar, @@ -328,9 +328,9 @@ public async Task TestPostgresStringTypes(string cChar, string cVarchar, string CBpchar = cBpchar, CText = cText, }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await 
QuerySql.GetPostgresStringTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPostgresStringTypesRow y) { Assert.That(x.CChar, Is.EqualTo(y.CChar)); Assert.That(x.CVarchar, Is.EqualTo(y.CVarchar)); @@ -460,7 +460,8 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CVarchar = cVarchar, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { MaxInteger = cInteger, @@ -508,9 +509,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(10, null, null, null, null, null)] public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarchar, string cCharacterVarying, string cBpchar, string cText) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresStringTypesBatchArgs { CChar = cChar, CVarchar = cVarchar, CCharacterVarying = cCharacterVarying, CBpchar = cBpchar, CText = cText }).ToList(); + await QuerySql.InsertPostgresStringTypesBatch(batchArgs); + var 
expected = new QuerySql.GetPostgresStringTypesCntRow { Cnt = batchSize, CChar = cChar, @@ -519,9 +520,9 @@ public async Task TestStringCopyFrom(int batchSize, string cChar, string cVarcha CBpchar = cBpchar, CText = cText }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresStringTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetPostgresStringTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CChar, Is.EqualTo(y.CChar)); diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 66edb512..d3526aa5 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -26,11 +26,6 @@ public class PostgresType public DateTime? CTimestamp { get; init; } public DateTime? CTimestampWithTz { get; init; } public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } public PhysicalAddress? CMacaddr { get; init; } @@ -38,6 +33,14 @@ public class PostgresType public Guid? CUuid { get; init; } public CEnum? CEnum { get; init; } }; +public class PostgresStringType +{ + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } +}; public class PostgresUnstructuredType { public JsonElement? 
CJson { get; init; } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 015e67b9..c4543e44 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -43,7 +43,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; init; } @@ -61,11 +61,6 @@ public class InsertPostgresTypesArgs public DateTime? CTimestamp { get; init; } public DateTime? CTimestampWithTz { get; init; } public TimeSpan? 
CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } public Guid? CUuid { get; init; } public CEnum? CEnum { get; init; } public NpgsqlCidr? CCidr { get; init; } @@ -91,11 +86,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_timestamp", args.CTimestamp); queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); queryParams.Add("c_interval", args.CInterval); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); queryParams.Add("c_cidr", args.CCidr); @@ -114,7 +104,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? 
CBoolean { get; init; } @@ -131,11 +121,6 @@ public class InsertPostgresTypesBatchArgs public DateTime? CTimestamp { get; init; } public DateTime? CTimestampWithTz { get; init; } public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } public Guid? CUuid { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } @@ -165,11 +150,6 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CTimestamp); await writer.WriteAsync(row.CTimestampWithTz); await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await writer.WriteAsync(row.CText); await writer.WriteAsync(row.CUuid); await writer.WriteAsync(row.CCidr); await writer.WriteAsync(row.CInet); @@ -183,7 +163,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? 
CBoolean { get; init; } @@ -201,11 +181,6 @@ public class GetPostgresTypesRow public DateTime? CTimestamp { get; init; } public DateTime? CTimestampWithTz { get; init; } public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } public Guid? CUuid { get; init; } public CEnum? CEnum { get; init; } public NpgsqlCidr? CCidr { get; init; } @@ -229,7 +204,7 @@ public class GetPostgresTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; init; } @@ -246,11 +221,6 @@ public class GetPostgresTypesCntRow public DateTime? 
CTimestamp { get; init; } public DateTime? CTimestampWithTz { get; init; } public TimeSpan? CInterval { get; init; } - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } public Guid? CUuid { get; init; } public NpgsqlCidr? CCidr { get; init; } public IPAddress? CInet { get; init; } @@ -273,7 +243,7 @@ public class GetPostgresTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; init; } @@ -311,6 +281,134 @@ public async Task TruncatePostgresTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs + { + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? 
CText { get; init; } + }; + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs + { + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? 
CText { get; init; } + }; + public async Task InsertPostgresStringTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public class GetPostgresStringTypesRow + { + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? 
CText { get; init; } + }; + public async Task GetPostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresStringTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow + { + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? 
CText { get; init; } + public required long Cnt { get; init; } + }; + public async Task GetPostgresStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index 0d2cf59c..ad65f941 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -194,116 +194,123 @@ } }, { - "name": "c_char", + "name": "c_cidr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "bpchar" + "name": "cidr" } }, { - "name": "c_varchar", + "name": "c_inet", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_character_varying", + "name": "c_macaddr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "macaddr" } }, { - "name": "c_bpchar", + "name": "c_macaddr8", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "bpchar" + 
"name": "macaddr8" } }, { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" } }, { - "name": "c_cidr", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "cidr" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_inet", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "bpchar" } }, { - "name": "c_macaddr", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_macaddr8", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_uuid", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "uuid" + "name": "bpchar" } }, { - "name": "c_enum", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "c_enum" + "name": "text" } } ] @@ -32554,7 +32561,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n 
$22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32800,86 +32807,6 @@ }, { "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, "column": { "name": "c_uuid", "length": -1, @@ -32895,7 +32822,7 @@ } }, { - "number": 22, + 
"number": 17, "column": { "name": "c_enum", "length": -1, @@ -32905,7 +32832,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_cidr", "length": -1, @@ -32921,7 +32848,7 @@ } }, { - "number": 24, + "number": 19, "column": { "name": "c_inet", "length": -1, @@ -32937,7 +32864,7 @@ } }, { - "number": 25, + "number": 20, "column": { "name": "c_macaddr", "length": -1, @@ -32947,7 +32874,7 @@ } }, { - "number": 26, + "number": 21, "column": { "name": "c_macaddr8", "length": -1, @@ -32966,7 +32893,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33182,81 +33109,6 @@ }, { "number": 15, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 16, - "column": { - "name": "c_varchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - 
"originalName": "c_varchar" - } - }, - { - "number": 17, - "column": { - "name": "c_character_varying", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 18, - "column": { - "name": "c_bpchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 19, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 20, "column": { "name": "c_uuid", "length": -1, @@ -33271,7 +33123,7 @@ } }, { - "number": 21, + "number": 16, "column": { "name": "c_cidr", "length": -1, @@ -33286,7 +33138,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_inet", "length": -1, @@ -33301,7 +33153,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_macaddr", "length": -1, @@ -33322,7 +33174,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33504,64 +33356,6 @@ }, "originalName": "c_interval" }, - { - 
"name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33629,7 +33423,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n 
c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33799,64 +33593,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33914,7 +33650,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -33954,6 +33690,328 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": 
{ + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": 
"c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", + "cmd": ":exec", + 
"filename": "query.sql" + }, + { + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 7cd672ac..6040cf78 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -3,7 +3,7 @@ 2 
postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunner╖ъ public"∙publicТ +./dist/LocalRunnerЎъ public"╕publicч postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -34,15 +34,7 @@ pg_catalog timestampM pg_catalog timestamptzA c_interval0         Rpostgres_typesb -pg_cataloginterval; -c_char0         Rpostgres_typesb -pg_catalogbpchar? 
- c_varchar0         Rpostgres_typesb -pg_catalogvarcharI -c_character_varying0         Rpostgres_typesb -pg_catalogvarchar1 -c_bpchar0         Rpostgres_typesbbpchar- -c_text0         Rpostgres_typesbtext- +pg_cataloginterval- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -50,7 +42,16 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumн +c_enum0         Rpostgres_typesbc_enumч +postgres_string_typesB +c_char0         Rpostgres_string_typesb +pg_catalogbpcharF + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharP +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarchar8 +c_bpchar0         Rpostgres_string_typesbbpchar4 +c_text0         Rpostgres_string_typesbtextн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10218,8 +10219,8 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir· -й +bio_type Autobiography BiographyMemoir╫ +о INSERT INTO postgres_types ( c_boolean, @@ -10237,11 +10238,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10266,16 +10262,11 @@ VALUES ( $14, $15, $16, - $17, + $17::c_enum, $18, $19, - $20, - $21, - $22::c_enum, - $23, - $24, - $25::macaddr, - $26::macaddr8 + $20::macaddr, + $21::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10296,21 +10287,16 @@ c_smallint*TP c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV c_interval0         
8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*PL -c_char0         8Rpublicpostgres_typesbpg_catalog.bpcharzc_char*WS - c_varchar0         8Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*kg -c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE -c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? -c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_interval*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesЗ -▄INSERT INTO postgres_types +macaddr82 Basic types : query.sqlBpostgres_typesю +сINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10326,11 +10312,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10354,12 +10335,7 @@ VALUES ( $15, $16, $17, - $18, - $19, - $20, - $21, - $22, - $23 + $18 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10379,17 +10355,12 @@ c_smallint*RN c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*NJ -c_char0         Rpublicpostgres_typesbpg_catalog.bpcharzc_char*UQ - c_varchar0         Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*ie -c_character_varying0         Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*GC -c_bpchar0         Rpublicpostgres_typesbbpcharzc_bpchar*A= -c_text0         
Rpublicpostgres_typesbtextzc_text*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу -└SELECT +c_interval*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= +c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= +c_inet0         Rpublicpostgres_typesbinetzc_inet*JF + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ +ЄSELECT c_boolean, c_bit, c_smallint, @@ -10405,11 +10376,6 @@ c_interval*NJ c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10449,23 +10415,15 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
-c_macaddr80         btext: query.sqlд -·SELECT +c_macaddr80         btext: query.sqlг +▐SELECT c_smallint, c_boolean, c_integer, @@ -10480,11 +10438,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10506,11 +10459,6 @@ GROUP BY c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10545,32 +10493,87 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql░ -ЖSELECT +cnt0         @bbigint: query.sql╤ +зSELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM postgres_typesGetPostgresFunctions:one"( +FROM postgres_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b anyarray"* max_timestamp0         @b anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +П +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) +VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypes:exec*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         
Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text2 String types : query.sqlBpostgres_string_typesв +ПINSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypesBatch :copyfrom*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text: query.sqlBpostgres_string_typesХ +bSELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1GetPostgresStringTypes:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B +c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text: query.sqlU +$TRUNCATE TABLE postgres_string_typesTruncatePostgresStringTypes:exec: query.sql╕ +сSELECT + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text, + COUNT(*) AS cnt +FROM postgres_string_types +GROUP BY + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +LIMIT 1GetPostgresStringTypesCnt:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B 
+c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text" +cnt0         @bbigint: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index dcb2cc25..a6bffae3 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -27,11 +27,6 @@ public class PostgresType public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -39,6 +34,14 @@ public class PostgresType public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } }; + public class PostgresStringType + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; public class PostgresUnstructuredType { public JsonElement? 
CJson { get; set; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index e9ba900e..ed8940c4 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -44,7 +44,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -62,11 +62,6 @@ public class InsertPostgresTypesArgs public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } @@ -92,11 +87,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_timestamp", args.CTimestamp); queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); queryParams.Add("c_interval", args.CInterval); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); queryParams.Add("c_cidr", args.CCidr); @@ -115,7 +105,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? 
CBoolean { get; set; } @@ -132,11 +122,6 @@ public class InsertPostgresTypesBatchArgs public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } @@ -166,11 +151,6 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CTimestamp); await writer.WriteAsync(row.CTimestampWithTz); await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await writer.WriteAsync(row.CText); await writer.WriteAsync(row.CUuid); await writer.WriteAsync(row.CCidr); await writer.WriteAsync(row.CInet); @@ -184,7 +164,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -202,11 +182,6 @@ public class GetPostgresTypesRow public DateTime? 
CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } @@ -230,7 +205,7 @@ public async Task GetPostgresTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; set; } @@ -247,11 +222,6 @@ public class GetPostgresTypesCntRow public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } @@ -274,7 +244,7 @@ public async Task GetPostgresTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -312,6 +282,134 @@ public async Task TruncatePostgresTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); + if 
(this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task InsertPostgresStringTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public class GetPostgresStringTypesRow + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public 
async Task GetPostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresStringTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await 
connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 13f6763d..91db06a8 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -194,116 +194,123 @@ } }, { - "name": "c_char", + "name": "c_cidr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "bpchar" + "name": "cidr" } }, { - "name": "c_varchar", + "name": "c_inet", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_character_varying", + "name": "c_macaddr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "macaddr" } }, { - "name": "c_bpchar", + "name": "c_macaddr8", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "macaddr8" } }, { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" } }, { - "name": "c_cidr", + "name": "c_enum", "length": -1, 
"table": { "name": "postgres_types" }, "type": { - "name": "cidr" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_inet", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "bpchar" } }, { - "name": "c_macaddr", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_macaddr8", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_uuid", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "uuid" + "name": "bpchar" } }, { - "name": "c_enum", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "c_enum" + "name": "text" } } ] @@ -32554,7 +32561,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n 
c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32800,86 +32807,6 @@ }, { "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, "column": { "name": "c_uuid", "length": -1, @@ -32895,7 +32822,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_enum", "length": -1, @@ -32905,7 +32832,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_cidr", "length": -1, @@ -32921,7 +32848,7 @@ } }, { - "number": 24, + "number": 19, 
"column": { "name": "c_inet", "length": -1, @@ -32937,7 +32864,7 @@ } }, { - "number": 25, + "number": 20, "column": { "name": "c_macaddr", "length": -1, @@ -32947,7 +32874,7 @@ } }, { - "number": 26, + "number": 21, "column": { "name": "c_macaddr8", "length": -1, @@ -32966,7 +32893,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33182,81 +33109,6 @@ }, { "number": 15, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 16, - "column": { - "name": "c_varchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 17, - "column": { - "name": "c_character_varying", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - 
"originalName": "c_character_varying" - } - }, - { - "number": 18, - "column": { - "name": "c_bpchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 19, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 20, "column": { "name": "c_uuid", "length": -1, @@ -33271,7 +33123,7 @@ } }, { - "number": 21, + "number": 16, "column": { "name": "c_cidr", "length": -1, @@ -33286,7 +33138,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_inet", "length": -1, @@ -33301,7 +33153,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_macaddr", "length": -1, @@ -33322,7 +33174,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33504,64 +33356,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": 
"postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33629,7 +33423,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": 
":one", "columns": [ @@ -33799,64 +33593,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33914,7 +33650,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -33954,6 +33690,328 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": 
"c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + 
"name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + 
"name": "GetPostgresStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 03598b2e..7ebcf1f2 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbЁ 
"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner╖ъ public"∙publicТ +./dist/LocalRunnerЎъ public"╕publicч postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -34,15 +34,7 @@ pg_catalog timestampM pg_catalog timestamptzA c_interval0         Rpostgres_typesb -pg_cataloginterval; -c_char0         Rpostgres_typesb -pg_catalogbpchar? 
- c_varchar0         Rpostgres_typesb -pg_catalogvarcharI -c_character_varying0         Rpostgres_typesb -pg_catalogvarchar1 -c_bpchar0         Rpostgres_typesbbpchar- -c_text0         Rpostgres_typesbtext- +pg_cataloginterval- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -50,7 +42,16 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumн +c_enum0         Rpostgres_typesbc_enumч +postgres_string_typesB +c_char0         Rpostgres_string_typesb +pg_catalogbpcharF + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharP +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarchar8 +c_bpchar0         Rpostgres_string_typesbbpchar4 +c_text0         Rpostgres_string_typesbtextн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10218,8 +10219,8 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir· -й +bio_type Autobiography BiographyMemoir╫ +о INSERT INTO postgres_types ( c_boolean, @@ -10237,11 +10238,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10266,16 +10262,11 @@ VALUES ( $14, $15, $16, - $17, + $17::c_enum, $18, $19, - $20, - $21, - $22::c_enum, - $23, - $24, - $25::macaddr, - $26::macaddr8 + $20::macaddr, + $21::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10296,21 +10287,16 @@ c_smallint*TP c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV c_interval0         
8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*PL -c_char0         8Rpublicpostgres_typesbpg_catalog.bpcharzc_char*WS - c_varchar0         8Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*kg -c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE -c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? -c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_interval*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesЗ -▄INSERT INTO postgres_types +macaddr82 Basic types : query.sqlBpostgres_typesю +сINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10326,11 +10312,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10354,12 +10335,7 @@ VALUES ( $15, $16, $17, - $18, - $19, - $20, - $21, - $22, - $23 + $18 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10379,17 +10355,12 @@ c_smallint*RN c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*NJ -c_char0         Rpublicpostgres_typesbpg_catalog.bpcharzc_char*UQ - c_varchar0         Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*ie -c_character_varying0         Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*GC -c_bpchar0         Rpublicpostgres_typesbbpcharzc_bpchar*A= -c_text0         
Rpublicpostgres_typesbtextzc_text*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу -└SELECT +c_interval*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= +c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= +c_inet0         Rpublicpostgres_typesbinetzc_inet*JF + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ +ЄSELECT c_boolean, c_bit, c_smallint, @@ -10405,11 +10376,6 @@ c_interval*NJ c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10449,23 +10415,15 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
-c_macaddr80         btext: query.sqlд -·SELECT +c_macaddr80         btext: query.sqlг +▐SELECT c_smallint, c_boolean, c_integer, @@ -10480,11 +10438,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10506,11 +10459,6 @@ GROUP BY c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10545,32 +10493,87 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql░ -ЖSELECT +cnt0         @bbigint: query.sql╤ +зSELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM postgres_typesGetPostgresFunctions:one"( +FROM postgres_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b anyarray"* max_timestamp0         @b anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +П +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) +VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypes:exec*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         
Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text2 String types : query.sqlBpostgres_string_typesв +ПINSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypesBatch :copyfrom*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text: query.sqlBpostgres_string_typesХ +bSELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1GetPostgresStringTypes:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B +c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text: query.sqlU +$TRUNCATE TABLE postgres_string_typesTruncatePostgresStringTypes:exec: query.sql╕ +сSELECT + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text, + COUNT(*) AS cnt +FROM postgres_string_types +GROUP BY + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +LIMIT 1GetPostgresStringTypesCnt:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B 
+c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text" +cnt0         @bbigint: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index 05a3fb90..778db110 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -9,7 +9,8 @@ using System.Xml; namespace NpgsqlExampleGen; -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid, CEnum? CEnum); +public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid, CEnum? CEnum); +public readonly record struct PostgresStringType(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); public readonly record struct PostgresUnstructuredType(JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride); public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? 
CTimestampArray); public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index fe79e682..e91c824a 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -40,8 +40,8 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -65,11 +65,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); @@ -104,11 +99,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); @@ -119,8 +109,8 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? 
CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -145,11 +135,6 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); await writer.WriteAsync(row.CInet ?? 
(object)DBNull.Value); @@ -163,8 +148,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -194,17 +179,12 @@ public async Task InsertPostgresTypesBatch(List ar CTimestamp = reader.IsDBNull(12) ? 
null : reader.GetDateTime(12), CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CEnum = reader.IsDBNull(21) ? null : reader.GetString(21).ToCEnum(), - CCidr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), - CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) + CUuid = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), + CEnum = reader.IsDBNull(16) ? null : reader.GetString(16).ToCEnum(), + CCidr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), + CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) }; } } @@ -241,17 +221,12 @@ public async Task InsertPostgresTypesBatch(List ar CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CEnum = reader.IsDBNull(21) ? 
null : reader.GetString(21).ToCEnum(), - CCidr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), - CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) + CUuid = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), + CEnum = reader.IsDBNull(16) ? null : reader.GetString(16).ToCEnum(), + CCidr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), + CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) }; } } @@ -260,8 +235,8 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; - public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? 
CMacaddr, long Cnt); + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); public async Task GetPostgresTypesCnt() { if (this.Transaction == null) @@ -290,16 +265,11 @@ public async Task InsertPostgresTypesBatch(List ar CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + CUuid = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), + CCidr = reader.IsDBNull(15) ? 
null : reader.GetFieldValue(15), + CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), + CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + Cnt = reader.GetInt64(18) }; } } @@ -335,16 +305,11 @@ public async Task InsertPostgresTypesBatch(List ar CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + CUuid = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), + CCidr = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), + CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), + CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + Cnt = reader.GetInt64(18) }; } } @@ -353,7 +318,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() { @@ -430,6 +395,207 @@ public async Task TruncatePostgresTypes() } } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public readonly record struct InsertPostgresStringTypesArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) + { + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresStringTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresStringTypesBatchArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); + public async Task InsertPostgresStringTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public readonly record struct GetPostgresStringTypesRow(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? 
CText); + public async Task GetPostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(TruncatePostgresStringTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresStringTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public readonly record struct GetPostgresStringTypesCntRow(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText, long Cnt); + public async Task GetPostgresStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + + return null; + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index a557de82..062fd68c 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -194,116 +194,123 @@ } }, { - "name": "c_char", + "name": "c_cidr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "bpchar" + "name": "cidr" } }, { - "name": "c_varchar", + "name": "c_inet", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_character_varying", + "name": "c_macaddr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "macaddr" } }, { - "name": "c_bpchar", + "name": "c_macaddr8", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "macaddr8" } }, { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" } }, { - "name": "c_cidr", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "cidr" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_inet", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "bpchar" } }, { - "name": "c_macaddr", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_macaddr8", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { 
- "name": "macaddr8" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_uuid", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "uuid" + "name": "bpchar" } }, { - "name": "c_enum", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "c_enum" + "name": "text" } } ] @@ -32554,7 +32561,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32800,86 +32807,6 @@ }, { "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - 
"table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, "column": { "name": "c_uuid", "length": -1, @@ -32895,7 +32822,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_enum", "length": -1, @@ -32905,7 +32832,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_cidr", "length": -1, @@ -32921,7 +32848,7 @@ } }, { - "number": 24, + "number": 19, "column": { "name": "c_inet", "length": -1, @@ -32937,7 +32864,7 @@ } }, { - "number": 25, + "number": 20, "column": { "name": "c_macaddr", "length": -1, @@ -32947,7 +32874,7 @@ } }, { - "number": 26, + "number": 21, "column": { "name": "c_macaddr8", "length": -1, @@ -32966,7 +32893,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n 
$16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33182,81 +33109,6 @@ }, { "number": 15, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 16, - "column": { - "name": "c_varchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 17, - "column": { - "name": "c_character_varying", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 18, - "column": { - "name": "c_bpchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 19, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 20, "column": { "name": "c_uuid", "length": -1, @@ -33271,7 +33123,7 @@ } }, { - "number": 21, + "number": 16, "column": { "name": "c_cidr", "length": -1, @@ -33286,7 +33138,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_inet", "length": -1, @@ -33301,7 +33153,7 @@ 
} }, { - "number": 23, + "number": 18, "column": { "name": "c_macaddr", "length": -1, @@ -33322,7 +33174,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33504,64 +33356,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33629,7 +33423,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n 
c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33799,64 +33593,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, 
- "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33914,7 +33650,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -33954,6 +33690,328 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": 
"postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + 
"type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + 
"name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index d70d6924..77647b3b 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunner╖ъ public"∙publicТ +./dist/LocalRunnerЎъ public"╕publicч postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -34,15 +34,7 @@ pg_catalog timestampM pg_catalog timestamptzA c_interval0     
    Rpostgres_typesb -pg_cataloginterval; -c_char0         Rpostgres_typesb -pg_catalogbpchar? - c_varchar0         Rpostgres_typesb -pg_catalogvarcharI -c_character_varying0         Rpostgres_typesb -pg_catalogvarchar1 -c_bpchar0         Rpostgres_typesbbpchar- -c_text0         Rpostgres_typesbtext- +pg_cataloginterval- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -50,7 +42,16 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumн +c_enum0         Rpostgres_typesbc_enumч +postgres_string_typesB +c_char0         Rpostgres_string_typesb +pg_catalogbpcharF + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharP +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarchar8 +c_bpchar0         Rpostgres_string_typesbbpchar4 +c_text0         Rpostgres_string_typesbtextн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10218,8 +10219,8 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir· -й +bio_type Autobiography BiographyMemoir╫ +о INSERT INTO postgres_types ( c_boolean, @@ -10237,11 +10238,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10266,16 +10262,11 @@ VALUES ( $14, $15, $16, - $17, + $17::c_enum, $18, $19, - $20, - $21, - $22::c_enum, - $23, - $24, - $25::macaddr, - $26::macaddr8 + $20::macaddr, + $21::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10296,21 +10287,16 @@ c_smallint*TP c_timestamp_with_tz0         
8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV c_interval0         8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*PL -c_char0         8Rpublicpostgres_typesbpg_catalog.bpcharzc_char*WS - c_varchar0         8Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*kg -c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE -c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? -c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_interval*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesЗ -▄INSERT INTO postgres_types +macaddr82 Basic types : query.sqlBpostgres_typesю +сINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10326,11 +10312,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10354,12 +10335,7 @@ VALUES ( $15, $16, $17, - $18, - $19, - $20, - $21, - $22, - $23 + $18 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10379,17 +10355,12 @@ c_smallint*RN c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*NJ -c_char0         Rpublicpostgres_typesbpg_catalog.bpcharzc_char*UQ - c_varchar0         Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*ie -c_character_varying0         
Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*GC -c_bpchar0         Rpublicpostgres_typesbbpcharzc_bpchar*A= -c_text0         Rpublicpostgres_typesbtextzc_text*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу -└SELECT +c_interval*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= +c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= +c_inet0         Rpublicpostgres_typesbinetzc_inet*JF + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ +ЄSELECT c_boolean, c_bit, c_smallint, @@ -10405,11 +10376,6 @@ c_interval*NJ c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10449,23 +10415,15 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
-c_macaddr80         btext: query.sqlд -·SELECT +c_macaddr80         btext: query.sqlг +▐SELECT c_smallint, c_boolean, c_integer, @@ -10480,11 +10438,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10506,11 +10459,6 @@ GROUP BY c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10545,32 +10493,87 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql░ -ЖSELECT +cnt0         @bbigint: query.sql╤ +зSELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM postgres_typesGetPostgresFunctions:one"( +FROM postgres_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b anyarray"* max_timestamp0         @b anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +П +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) +VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypes:exec*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         
Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text2 String types : query.sqlBpostgres_string_typesв +ПINSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypesBatch :copyfrom*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text: query.sqlBpostgres_string_typesХ +bSELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1GetPostgresStringTypes:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B +c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text: query.sqlU +$TRUNCATE TABLE postgres_string_typesTruncatePostgresStringTypes:exec: query.sql╕ +сSELECT + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text, + COUNT(*) AS cnt +FROM postgres_string_types +GROUP BY + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +LIMIT 1GetPostgresStringTypesCnt:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B 
+c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text" +cnt0         @bbigint: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 42367e29..a26cf9c9 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -27,11 +27,6 @@ public class PostgresType public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } public PhysicalAddress CMacaddr { get; set; } @@ -39,6 +34,14 @@ public class PostgresType public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } }; + public class PostgresStringType + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; public class PostgresUnstructuredType { public JsonElement? 
CJson { get; set; } diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 3a93c541..f610eb53 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -41,7 +41,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -59,11 +59,6 @@ public class InsertPostgresTypesArgs public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } @@ -94,11 +89,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); @@ -133,11 +123,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); @@ -148,7 +133,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? CBoolean { get; set; } @@ -165,11 +150,6 @@ public class InsertPostgresTypesBatchArgs public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public NpgsqlCidr? 
CCidr { get; set; } public IPAddress CInet { get; set; } @@ -199,11 +179,6 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); @@ -217,7 +192,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -235,11 +210,6 @@ public class GetPostgresTypesRow public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? 
CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } public NpgsqlCidr? CCidr { get; set; } @@ -276,17 +246,12 @@ public async Task GetPostgresTypes() CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CEnum = reader.IsDBNull(21) ? (CEnum? )null : reader.GetString(21).ToCEnum(), - CCidr = reader.IsDBNull(22) ? (NpgsqlCidr? )null : reader.GetFieldValue(22), - CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), - CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) + CUuid = reader.IsDBNull(15) ? (Guid? )null : reader.GetFieldValue(15), + CEnum = reader.IsDBNull(16) ? (CEnum? )null : reader.GetString(16).ToCEnum(), + CCidr = reader.IsDBNull(17) ? (NpgsqlCidr? )null : reader.GetFieldValue(17), + CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), + CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) }; } } @@ -323,17 +288,12 @@ public async Task GetPostgresTypes() CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), CTimestampWithTz = reader.IsDBNull(13) ? 
(DateTime? )null : reader.GetDateTime(13), CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CChar = reader.IsDBNull(15) ? null : reader.GetString(15), - CVarchar = reader.IsDBNull(16) ? null : reader.GetString(16), - CCharacterVarying = reader.IsDBNull(17) ? null : reader.GetString(17), - CBpchar = reader.IsDBNull(18) ? null : reader.GetString(18), - CText = reader.IsDBNull(19) ? null : reader.GetString(19), - CUuid = reader.IsDBNull(20) ? (Guid? )null : reader.GetFieldValue(20), - CEnum = reader.IsDBNull(21) ? (CEnum? )null : reader.GetString(21).ToCEnum(), - CCidr = reader.IsDBNull(22) ? (NpgsqlCidr? )null : reader.GetFieldValue(22), - CInet = reader.IsDBNull(23) ? null : reader.GetFieldValue(23), - CMacaddr = reader.IsDBNull(24) ? null : reader.GetFieldValue(24), - CMacaddr8 = reader.IsDBNull(25) ? null : reader.GetString(25) + CUuid = reader.IsDBNull(15) ? (Guid? )null : reader.GetFieldValue(15), + CEnum = reader.IsDBNull(16) ? (CEnum? )null : reader.GetString(16).ToCEnum(), + CCidr = reader.IsDBNull(17) ? (NpgsqlCidr? )null : reader.GetFieldValue(17), + CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), + CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), + CMacaddr8 = reader.IsDBNull(20) ? 
null : reader.GetString(20) }; } } @@ -342,7 +302,7 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_char, c_varchar, c_character_varying, c_bpchar, c_text, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; set; } @@ -359,11 +319,6 @@ public class GetPostgresTypesCntRow public DateTime? CTimestamp { get; set; } public DateTime? CTimestampWithTz { get; set; } public TimeSpan? CInterval { get; set; } - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } public Guid? CUuid { get; set; } public NpgsqlCidr? CCidr { get; set; } public IPAddress CInet { get; set; } @@ -398,16 +353,11 @@ public async Task GetPostgresTypesCnt() CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), CTimestampWithTz = reader.IsDBNull(12) ? 
(DateTime? )null : reader.GetDateTime(12), CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? )null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + CUuid = reader.IsDBNull(14) ? (Guid? )null : reader.GetFieldValue(14), + CCidr = reader.IsDBNull(15) ? (NpgsqlCidr? )null : reader.GetFieldValue(15), + CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), + CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + Cnt = reader.GetInt64(18) }; } } @@ -443,16 +393,11 @@ public async Task GetPostgresTypesCnt() CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CChar = reader.IsDBNull(14) ? null : reader.GetString(14), - CVarchar = reader.IsDBNull(15) ? null : reader.GetString(15), - CCharacterVarying = reader.IsDBNull(16) ? null : reader.GetString(16), - CBpchar = reader.IsDBNull(17) ? null : reader.GetString(17), - CText = reader.IsDBNull(18) ? null : reader.GetString(18), - CUuid = reader.IsDBNull(19) ? (Guid? )null : reader.GetFieldValue(19), - CCidr = reader.IsDBNull(20) ? (NpgsqlCidr? )null : reader.GetFieldValue(20), - CInet = reader.IsDBNull(21) ? null : reader.GetFieldValue(21), - CMacaddr = reader.IsDBNull(22) ? 
null : reader.GetFieldValue(22), - Cnt = reader.GetInt64(23) + CUuid = reader.IsDBNull(14) ? (Guid? )null : reader.GetFieldValue(14), + CCidr = reader.IsDBNull(15) ? (NpgsqlCidr? )null : reader.GetFieldValue(15), + CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), + CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), + Cnt = reader.GetInt64(18) }; } } @@ -461,7 +406,7 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -543,6 +488,236 @@ public async Task TruncatePostgresTypes() } } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) + { + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresStringTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task InsertPostgresStringTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CChar ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public class GetPostgresStringTypesRow + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task GetPostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresStringTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresStringTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, 
c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresStringTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + + return null; + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index c67fd5c2..82002a72 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -194,116 +194,123 @@ } }, { - "name": "c_char", + "name": "c_cidr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "bpchar" + "name": "cidr" } }, { - "name": "c_varchar", + "name": "c_inet", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_character_varying", + "name": "c_macaddr", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "macaddr" } }, { - "name": "c_bpchar", + "name": "c_macaddr8", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "macaddr8" } }, { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" } }, { - "name": "c_cidr", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "cidr" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_inet", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": 
"bpchar" } }, { - "name": "c_macaddr", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_macaddr8", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "varchar" } }, { - "name": "c_uuid", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "uuid" + "name": "bpchar" } }, { - "name": "c_enum", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "c_enum" + "name": "text" } } ] @@ -32554,7 +32561,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17,\n $18,\n $19,\n $20,\n $21,\n $22::c_enum,\n $23,\n $24,\n $25::macaddr,\n $26::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", "name": "InsertPostgresTypes", "cmd": 
":exec", "parameters": [ @@ -32800,86 +32807,6 @@ }, { "number": 16, - "column": { - "name": "c_char", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 17, - "column": { - "name": "c_varchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 18, - "column": { - "name": "c_character_varying", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 19, - "column": { - "name": "c_bpchar", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 20, - "column": { - "name": "c_text", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 21, "column": { "name": "c_uuid", "length": -1, @@ -32895,7 +32822,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_enum", "length": -1, @@ -32905,7 +32832,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_cidr", "length": -1, @@ -32921,7 +32848,7 @@ } }, { - "number": 24, + "number": 19, "column": { "name": "c_inet", "length": -1, @@ -32937,7 +32864,7 @@ } }, { - "number": 25, + "number": 20, "column": { "name": "c_macaddr", "length": -1, @@ -32947,7 +32874,7 @@ } }, { - "number": 26, + "number": 21, "column": { "name": "c_macaddr8", "length": -1, @@ -32966,7 +32893,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n 
c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18,\n $19,\n $20,\n $21,\n $22,\n $23\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33182,81 +33109,6 @@ }, { "number": 15, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bpchar" - }, - "originalName": "c_char" - } - }, - { - "number": 16, - "column": { - "name": "c_varchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 17, - "column": { - "name": "c_character_varying", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.varchar" - }, - "originalName": "c_character_varying" - } - }, - { - "number": 18, - "column": { - "name": "c_bpchar", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - } - }, - { - "number": 19, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - 
"name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 20, "column": { "name": "c_uuid", "length": -1, @@ -33271,7 +33123,7 @@ } }, { - "number": 21, + "number": 16, "column": { "name": "c_cidr", "length": -1, @@ -33286,7 +33138,7 @@ } }, { - "number": 22, + "number": 17, "column": { "name": "c_inet", "length": -1, @@ -33301,7 +33153,7 @@ } }, { - "number": 23, + "number": 18, "column": { "name": "c_macaddr", "length": -1, @@ -33322,7 +33174,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33504,64 +33356,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - 
"table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33629,7 +33423,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33799,64 +33593,6 @@ }, "originalName": "c_interval" }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - }, - "originalName": "c_char" - }, - { - "name": "c_varchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": 
"pg_catalog", - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_character_varying", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "varchar" - }, - "originalName": "c_character_varying" - }, - { - "name": "c_bpchar", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "bpchar" - }, - "originalName": "c_bpchar" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, { "name": "c_uuid", "length": -1, @@ -33914,7 +33650,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -33954,6 +33690,328 @@ "cmd": ":exec", "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": 
"pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + } + ], + "comments": [ + " String types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.bpchar" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_varchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 3, + "column": { + "name": "c_character_varying", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "c_character_varying" + } + }, + { + "number": 4, + "column": { + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + 
"originalName": "c_text" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_string_types" + } + }, + { + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_char", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bpchar" + }, + "originalName": "c_char" + }, + { + "name": "c_varchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + 
"schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_character_varying", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "schema": "pg_catalog", + "name": "varchar" + }, + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index b91b9065..292856e1 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbх 
examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner╖ъ public"∙publicТ +./dist/LocalRunnerЎъ public"╕publicч postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -34,15 +34,7 @@ pg_catalog timestampM pg_catalog timestamptzA c_interval0         Rpostgres_typesb -pg_cataloginterval; -c_char0         Rpostgres_typesb -pg_catalogbpchar? 
- c_varchar0         Rpostgres_typesb -pg_catalogvarcharI -c_character_varying0         Rpostgres_typesb -pg_catalogvarchar1 -c_bpchar0         Rpostgres_typesbbpchar- -c_text0         Rpostgres_typesbtext- +pg_cataloginterval- c_cidr0         Rpostgres_typesbcidr- c_inet0         Rpostgres_typesbinet3 c_macaddr0         Rpostgres_typesb macaddr5 @@ -50,7 +42,16 @@ pg_catalogvarchar1 c_macaddr80         Rpostgres_typesb macaddr8- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumн +c_enum0         Rpostgres_typesbc_enumч +postgres_string_typesB +c_char0         Rpostgres_string_typesb +pg_catalogbpcharF + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharP +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarchar8 +c_bpchar0         Rpostgres_string_typesbbpchar4 +c_text0         Rpostgres_string_typesbtextн postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10218,8 +10219,8 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir· -й +bio_type Autobiography BiographyMemoir╫ +о INSERT INTO postgres_types ( c_boolean, @@ -10237,11 +10238,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10266,16 +10262,11 @@ VALUES ( $14, $15, $16, - $17, + $17::c_enum, $18, $19, - $20, - $21, - $22::c_enum, - $23, - $24, - $25::macaddr, - $26::macaddr8 + $20::macaddr, + $21::macaddr8 )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10296,21 +10287,16 @@ c_smallint*TP c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV c_interval0         
8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*PL -c_char0         8Rpublicpostgres_typesbpg_catalog.bpcharzc_char*WS - c_varchar0         8Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*kg -c_character_varying0         8Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*IE -c_bpchar0         8Rpublicpostgres_typesbbpcharzc_bpchar*C? -c_text0         8Rpublicpostgres_typesbtextzc_text*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# +c_interval*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum*C? +c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? +c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! + c_macaddr0         b macaddr*'# c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesЗ -▄INSERT INTO postgres_types +macaddr82 Basic types : query.sqlBpostgres_typesю +сINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10326,11 +10312,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10354,12 +10335,7 @@ VALUES ( $15, $16, $17, - $18, - $19, - $20, - $21, - $22, - $23 + $18 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10379,17 +10355,12 @@ c_smallint*RN c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*NJ -c_char0         Rpublicpostgres_typesbpg_catalog.bpcharzc_char*UQ - c_varchar0         Rpublicpostgres_typesbpg_catalog.varcharz c_varchar*ie -c_character_varying0         Rpublicpostgres_typesbpg_catalog.varcharzc_character_varying*GC -c_bpchar0         Rpublicpostgres_typesbbpcharzc_bpchar*A= -c_text0         
Rpublicpostgres_typesbtextzc_text*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_typesу -└SELECT +c_interval*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= +c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= +c_inet0         Rpublicpostgres_typesbinetzc_inet*JF + c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ +ЄSELECT c_boolean, c_bit, c_smallint, @@ -10405,11 +10376,6 @@ c_interval*NJ c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -10449,23 +10415,15 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 c_enum0         Rpostgres_typesbc_enumzc_enum"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
-c_macaddr80         btext: query.sqlд -·SELECT +c_macaddr80         btext: query.sqlг +▐SELECT c_smallint, c_boolean, c_integer, @@ -10480,11 +10438,6 @@ c_macaddr80 c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10506,11 +10459,6 @@ GROUP BY c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -10545,32 +10493,87 @@ pg_catalog timestamptzzc_timestamp_with_tz"M c_interval0         Rpostgres_typesb pg_catalogintervalz -c_interval"C -c_char0         Rpostgres_typesb -pg_catalogbpcharzc_char"J - c_varchar0         Rpostgres_typesb -pg_catalogvarcharz c_varchar"^ -c_character_varying0         Rpostgres_typesb -pg_catalogvarcharzc_character_varying"; -c_bpchar0         Rpostgres_typesbbpcharzc_bpchar"5 -c_text0         Rpostgres_typesbtextzc_text"5 +c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"5 c_cidr0         Rpostgres_typesbcidrzc_cidr"5 c_inet0         Rpostgres_typesbinetzc_inet"> c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql░ -ЖSELECT +cnt0         @bbigint: query.sql╤ +зSELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM postgres_typesGetPostgresFunctions:one"( +FROM postgres_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b anyarray"* max_timestamp0         @b anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sql╨ +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +П +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) +VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypes:exec*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         
Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text2 String types : query.sqlBpostgres_string_typesв +ПINSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) VALUES ($1, $2, $3, $4, $5)InsertPostgresStringTypesBatch :copyfrom*UQ +c_char0         Rpublicpostgres_string_typesbpg_catalog.bpcharzc_char*\X + c_varchar0         Rpublicpostgres_string_typesbpg_catalog.varcharz c_varchar*pl +c_character_varying0         Rpublicpostgres_string_typesbpg_catalog.varcharzc_character_varying*NJ +c_bpchar0         Rpublicpostgres_string_typesbbpcharzc_bpchar*HD +c_text0         Rpublicpostgres_string_typesbtextzc_text: query.sqlBpostgres_string_typesХ +bSELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1GetPostgresStringTypes:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B +c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text: query.sqlU +$TRUNCATE TABLE postgres_string_typesTruncatePostgresStringTypes:exec: query.sql╕ +сSELECT + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text, + COUNT(*) AS cnt +FROM postgres_string_types +GROUP BY + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +LIMIT 1GetPostgresStringTypesCnt:one"J +c_char0         Rpostgres_string_typesb +pg_catalogbpcharzc_char"Q + c_varchar0         Rpostgres_string_typesb +pg_catalogvarcharz c_varchar"e +c_character_varying0         Rpostgres_string_typesb +pg_catalogvarcharzc_character_varying"B 
+c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< +c_text0         Rpostgres_string_typesbtextzc_text" +cnt0         @bbigint: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index e418c76d..92a0ba59 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -18,11 +18,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -46,11 +41,6 @@ VALUES ( sqlc.narg('c_timestamp'), sqlc.narg('c_timestamp_with_tz'), sqlc.narg('c_interval'), - sqlc.narg('c_char'), - sqlc.narg('c_varchar'), - sqlc.narg('c_character_varying'), - sqlc.narg('c_bpchar'), - sqlc.narg('c_text'), sqlc.narg('c_uuid'), sqlc.narg('c_enum')::c_enum, sqlc.narg('c_cidr'), @@ -76,11 +66,6 @@ INSERT INTO postgres_types c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -104,12 +89,7 @@ VALUES ( $15, $16, $17, - $18, - $19, - $20, - $21, - $22, - $23 + $18 ); -- name: GetPostgresTypes :one @@ -129,11 +109,6 @@ SELECT c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_enum, c_cidr, @@ -159,11 +134,6 @@ SELECT c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -185,11 +155,6 @@ GROUP BY c_timestamp, c_timestamp_with_tz, c_interval, - c_char, - c_varchar, - c_character_varying, - c_bpchar, - c_text, c_uuid, c_cidr, c_inet, @@ -201,11 +166,58 @@ SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp -FROM postgres_types; +FROM postgres_types +CROSS JOIN postgres_string_types; -- name: TruncatePostgresTypes :exec TRUNCATE TABLE 
postgres_types; +/* String types */ + +-- name: InsertPostgresStringTypes :exec +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) +VALUES ($1, $2, $3, $4, $5); + +-- name: InsertPostgresStringTypesBatch :copyfrom +INSERT INTO postgres_string_types +( + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +) VALUES ($1, $2, $3, $4, $5); + +-- name: GetPostgresStringTypes :one +SELECT * FROM postgres_string_types LIMIT 1; + +-- name: TruncatePostgresStringTypes :exec +TRUNCATE TABLE postgres_string_types; + +-- name: GetPostgresStringTypesCnt :one +SELECT + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text, + COUNT(*) AS cnt +FROM postgres_string_types +GROUP BY + c_char, + c_varchar, + c_character_varying, + c_bpchar, + c_text +LIMIT 1; + /* Unstructured types */ -- name: InsertPostgresUnstructuredTypes :exec diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index 0cd1c9f1..a18595e7 100644 --- a/examples/config/postgresql/types/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -22,13 +22,6 @@ CREATE TABLE postgres_types ( c_timestamp_with_tz TIMESTAMP WITH TIME ZONE, c_interval INTERVAL, - /* String Data Type Syntax */ - c_char CHAR, - c_varchar VARCHAR(100), - c_character_varying CHARACTER VARYING(100), - c_bpchar BPCHAR(100), - c_text TEXT, - /* Network Address Data Types */ c_cidr CIDR, c_inet INET, @@ -40,6 +33,14 @@ CREATE TABLE postgres_types ( c_enum c_enum ); +CREATE TABLE postgres_string_types ( + c_char CHAR, + c_varchar VARCHAR(100), + c_character_varying CHARACTER VARYING(100), + c_bpchar BPCHAR(100), + c_text TEXT +); + CREATE TABLE postgres_unstructured_types ( c_json JSON, c_json_string_override JSON, From c2b21265f24a017f33b51c957cc7c310faf5c553 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Tue, 19 Aug 2025 23:52:32 +0200 Subject: [PATCH 25/33] feat: support full text search data types --- 
Drivers/NpgsqlDriver.cs | 25 ++++++- docs/04_Postgres.md | 4 +- end2end/EndToEndScaffold/Config.cs | 3 + .../Templates/PostgresTests.cs | 34 ++++++++++ .../NpgsqlDapperTester.generated.cs | 23 +++++++ .../EndToEndTests/NpgsqlTester.generated.cs | 23 +++++++ .../NpgsqlDapperTester.generated.cs | 23 +++++++ .../NpgsqlTester.generated.cs | 23 +++++++ examples/NpgsqlDapperExample/QuerySql.cs | 30 +++++++++ examples/NpgsqlDapperExample/Utils.cs | 2 + examples/NpgsqlDapperExample/request.json | 65 +++++++++++++++++++ examples/NpgsqlDapperExample/request.message | 22 ++++++- .../NpgsqlDapperLegacyExample/QuerySql.cs | 30 +++++++++ examples/NpgsqlDapperLegacyExample/Utils.cs | 2 + .../NpgsqlDapperLegacyExample/request.json | 65 +++++++++++++++++++ .../NpgsqlDapperLegacyExample/request.message | 22 ++++++- examples/NpgsqlExample/QuerySql.cs | 56 ++++++++++++++++ examples/NpgsqlExample/request.json | 65 +++++++++++++++++++ examples/NpgsqlExample/request.message | 22 ++++++- examples/NpgsqlLegacyExample/QuerySql.cs | 65 +++++++++++++++++++ examples/NpgsqlLegacyExample/request.json | 65 +++++++++++++++++++ examples/NpgsqlLegacyExample/request.message | 22 ++++++- examples/config/postgresql/types/query.sql | 15 +++++ examples/config/postgresql/types/schema.sql | 5 ++ 24 files changed, 704 insertions(+), 7 deletions(-) diff --git a/Drivers/NpgsqlDriver.cs b/Drivers/NpgsqlDriver.cs index 8e7b1f01..7ca821f5 100644 --- a/Drivers/NpgsqlDriver.cs +++ b/Drivers/NpgsqlDriver.cs @@ -76,7 +76,8 @@ public NpgsqlDriver( ["float"] = new( new() { - { "float4", new() } + { "float4", new() }, + { "real", new() } }, readerFn: ordinal => $"reader.GetFloat({ordinal})", readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})" @@ -286,6 +287,28 @@ public NpgsqlDriver( sqlMapper: "SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler());" ), + /* Full-text search data types */ + ["NpgsqlTsQuery"] = new( + new() + { + { "tsquery", new() } + }, + readerFn: ordinal => 
$"reader.GetFieldValue({ordinal})", + readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", + usingDirective: "NpgsqlTypes", + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlTsQuery), new NpgsqlTypeHandler());" + ), + ["NpgsqlTsVector"] = new( + new() + { + { "tsvector", new() } + }, + readerFn: ordinal => $"reader.GetFieldValue({ordinal})", + readerArrayFn: ordinal => $"reader.GetFieldValue({ordinal})", + usingDirective: "NpgsqlTypes", + sqlMapper: "SqlMapper.AddTypeHandler(typeof(NpgsqlTsVector), new NpgsqlTypeHandler());" + ), + /* Other data types */ ["Guid"] = new( new() diff --git a/docs/04_Postgres.md b/docs/04_Postgres.md index 6b66fb0c..a3861b7d 100644 --- a/docs/04_Postgres.md +++ b/docs/04_Postgres.md @@ -59,8 +59,8 @@ we consider support for the different data types separately for batch inserts an | inet | тЬЕ | тЬЕ | | macaddr | тЬЕ | тЬЕ | | macaddr8 | тЬЕ | тЭМ | -| tsvector | тЭМ | тЭМ | -| tsquery | тЭМ | тЭМ | +| tsvector | тЬЕ | тЭМ | +| tsquery | тЬЕ | тЭМ | | uuid | тЬЕ | тЬЕ | | json | тЬЕ | тЭМ | | jsonb | тЬЕ | тЭМ | diff --git a/end2end/EndToEndScaffold/Config.cs b/end2end/EndToEndScaffold/Config.cs index 62dbef09..771ee6bb 100644 --- a/end2end/EndToEndScaffold/Config.cs +++ b/end2end/EndToEndScaffold/Config.cs @@ -42,6 +42,7 @@ public enum KnownTestType PostgresArrayDataTypes, PostgresDataTypesOverride, PostgresGuidDataTypes, + PostgresFullTextSearchDataTypes, PostgresStringCopyFrom, PostgresTransaction, @@ -209,6 +210,7 @@ internal static class Config KnownTestType.PostgresXmlDataTypes, KnownTestType.PostgresInvalidXml, KnownTestType.PostgresEnumDataType, + KnownTestType.PostgresFullTextSearchDataTypes, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, @@ -255,6 +257,7 @@ internal static class Config KnownTestType.PostgresXmlDataTypes, KnownTestType.PostgresInvalidXml, KnownTestType.PostgresEnumDataType, + KnownTestType.PostgresFullTextSearchDataTypes, KnownTestType.PostgresStringCopyFrom, 
KnownTestType.PostgresIntegerCopyFrom, diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 993ff402..98a67831 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -1110,6 +1110,40 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } """ + }, + [KnownTestType.PostgresFullTextSearchDataTypes] = new TestImpl + { + Impl = $$""" + [Test] + [Obsolete] + public async Task TestPostgresFullTextSearchDataTypes() + { + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs + { + CText = "Hello world" + }); + + var actual = await QuerySql.GetPostgresStringTypesTextSearch( + new QuerySql.GetPostgresStringTypesTextSearchArgs { ToTsquery = "Hello" }); + + var expected = new QuerySql.GetPostgresStringTypesTextSearchRow + { + CText = "Hello world", + Query = new NpgsqlTsQueryLexeme("hello"), + Tsv = NpgsqlTsVector.Parse("hello:1 world:2"), + Rnk = 0.07f + }; + AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); + + void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QuerySql.GetPostgresStringTypesTextSearchRow y) + { + Assert.That(y.CText, Is.EqualTo(x.CText)); + Assert.That(y.Query.ToString(), Is.EqualTo(x.Query.ToString())); + Assert.That(y.Tsv.ToString(), Is.EqualTo(x.Tsv.ToString())); + Assert.That(y.Rnk, Is.AtMost(x.Rnk)); + } + } + """ } }; } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 4213093b..fa6bc1ba 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -504,6 +504,29 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + [Obsolete] + public async Task 
TestPostgresFullTextSearchDataTypes() + { + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); + var actual = await QuerySql.GetPostgresStringTypesTextSearch(new QuerySql.GetPostgresStringTypesTextSearchArgs { ToTsquery = "Hello" }); + var expected = new QuerySql.GetPostgresStringTypesTextSearchRow + { + CText = "Hello world", + Query = new NpgsqlTsQueryLexeme("hello"), + Tsv = NpgsqlTsVector.Parse("hello:1 world:2"), + Rnk = 0.07f + }; + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QuerySql.GetPostgresStringTypesTextSearchRow y) + { + Assert.That(y.CText, Is.EqualTo(x.CText)); + Assert.That(y.Query.ToString(), Is.EqualTo(x.Query.ToString())); + Assert.That(y.Tsv.ToString(), Is.EqualTo(x.Tsv.ToString())); + Assert.That(y.Rnk, Is.AtMost(x.Rnk)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 53246f34..cd8435e4 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -504,6 +504,29 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + [Obsolete] + public async Task TestPostgresFullTextSearchDataTypes() + { + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); + var actual = await QuerySql.GetPostgresStringTypesTextSearch(new QuerySql.GetPostgresStringTypesTextSearchArgs { ToTsquery = "Hello" }); + var expected = new QuerySql.GetPostgresStringTypesTextSearchRow + { + CText = "Hello world", + Query = new NpgsqlTsQueryLexeme("hello"), + Tsv = NpgsqlTsVector.Parse("hello:1 world:2"), + Rnk = 0.07f + }; + 
AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QuerySql.GetPostgresStringTypesTextSearchRow y) + { + Assert.That(y.CText, Is.EqualTo(x.CText)); + Assert.That(y.Query.ToString(), Is.EqualTo(x.Query.ToString())); + Assert.That(y.Tsv.ToString(), Is.EqualTo(x.Tsv.ToString())); + Assert.That(y.Rnk, Is.AtMost(x.Rnk)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 465e2cb4..3aa04a00 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -504,6 +504,29 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + [Obsolete] + public async Task TestPostgresFullTextSearchDataTypes() + { + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); + var actual = await QuerySql.GetPostgresStringTypesTextSearch(new QuerySql.GetPostgresStringTypesTextSearchArgs { ToTsquery = "Hello" }); + var expected = new QuerySql.GetPostgresStringTypesTextSearchRow + { + CText = "Hello world", + Query = new NpgsqlTsQueryLexeme("hello"), + Tsv = NpgsqlTsVector.Parse("hello:1 world:2"), + Rnk = 0.07f + }; + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QuerySql.GetPostgresStringTypesTextSearchRow y) + { + Assert.That(y.CText, Is.EqualTo(x.CText)); + Assert.That(y.Query.ToString(), Is.EqualTo(x.Query.ToString())); + Assert.That(y.Tsv.ToString(), Is.EqualTo(x.Tsv.ToString())); + Assert.That(y.Rnk, Is.AtMost(x.Rnk)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles 
the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 45eec99d..22621e87 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -504,6 +504,29 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } } + [Test] + [Obsolete] + public async Task TestPostgresFullTextSearchDataTypes() + { + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); + var actual = await QuerySql.GetPostgresStringTypesTextSearch(new QuerySql.GetPostgresStringTypesTextSearchArgs { ToTsquery = "Hello" }); + var expected = new QuerySql.GetPostgresStringTypesTextSearchRow + { + CText = "Hello world", + Query = new NpgsqlTsQueryLexeme("hello"), + Tsv = NpgsqlTsVector.Parse("hello:1 world:2"), + Rnk = 0.07f + }; + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QuerySql.GetPostgresStringTypesTextSearchRow y) + { + Assert.That(y.CText, Is.EqualTo(x.CText)); + Assert.That(y.Query.ToString(), Is.EqualTo(x.Query.ToString())); + Assert.That(y.Tsv.ToString(), Is.EqualTo(x.Tsv.ToString())); + Assert.That(y.Rnk, Is.AtMost(x.Rnk)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index c4543e44..f19fddf0 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -409,6 +409,36 @@ public class GetPostgresStringTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: 
this.Transaction); } + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow + { + public string? CText { get; init; } + public required NpgsqlTsQuery Query { get; init; } + public required NpgsqlTsVector Tsv { get; init; } + public required float Rnk { get; init; } + }; + public class GetPostgresStringTypesTextSearchArgs + { + public required string ToTsquery { get; init; } + }; + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("to_tsquery", args.ToTsquery); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperExample/Utils.cs 
b/examples/NpgsqlDapperExample/Utils.cs index 03167caa..b44b6de3 100644 --- a/examples/NpgsqlDapperExample/Utils.cs +++ b/examples/NpgsqlDapperExample/Utils.cs @@ -60,6 +60,8 @@ public static void ConfigureSqlMapper() SqlMapper.AddTypeHandler(typeof(NpgsqlCidr), new NpgsqlTypeHandler()); SqlMapper.AddTypeHandler(typeof(IPAddress), new NpgsqlTypeHandler()); SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlTsQuery), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlTsVector), new NpgsqlTypeHandler()); } private class NpgsqlTypeHandler : SqlMapper.TypeHandler where T : notnull diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index ad65f941..67007e5a 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -34012,6 +34012,71 @@ ], "filename": "query.sql" }, + { + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", + "cmd": ":one", + "columns": [ + { + "name": "c_text", + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "query", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsquery" + }, + "originalName": "query" + }, + { + "name": "tsv", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsvector" + }, + "originalName": "tsv" + }, + { + "name": "rnk", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "real" + } + } + ], + "parameters": [ + { + "number": 1, 
+ "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 6040cf78..41e914a3 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -10573,7 +10573,27 @@ pg_catalogvarcharz c_varchar"e pg_catalogvarcharzc_character_varying"B c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< c_text0         Rpostgres_string_typesbtextzc_text" -cnt0         @bbigint: query.sql╨ +cnt0         @bbigint: query.sqlь +╪WITH txt_query AS ( + SELECT + c_text, + to_tsquery('english', $1) AS query, + to_tsvector('english', c_text) AS tsv + FROM postgres_string_types + WHERE c_text @@ to_tsquery('english', $1) +) + +SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk +FROM txt_query +ORDER BY rnk DESC +LIMIT 1 GetPostgresStringTypesTextSearch:one"0 +c_text0         R  txt_querybtextzc_text"3 +query0         R  txt_queryb tsqueryzquery"0 +tsv0         R  txt_queryb +tsvectorztsv" +rnk0         @breal*%! 
+ +to_tsquery0         btext: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index ed8940c4..5416b75e 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -410,6 +410,36 @@ public async Task GetPostgresStringTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); } + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow + { + public string CText { get; set; } + public NpgsqlTsQuery Query { get; set; } + public NpgsqlTsVector Tsv { get; set; } + public float Rnk { get; set; } + }; + public class GetPostgresStringTypesTextSearchArgs + { + public string ToTsquery { get; set; } + }; + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("to_tsquery", args.ToTsquery); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, 
transaction: this.Transaction); + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperLegacyExample/Utils.cs b/examples/NpgsqlDapperLegacyExample/Utils.cs index 603c6891..5ec33447 100644 --- a/examples/NpgsqlDapperLegacyExample/Utils.cs +++ b/examples/NpgsqlDapperLegacyExample/Utils.cs @@ -61,6 +61,8 @@ public static void ConfigureSqlMapper() SqlMapper.AddTypeHandler(typeof(NpgsqlCidr), new NpgsqlTypeHandler()); SqlMapper.AddTypeHandler(typeof(IPAddress), new NpgsqlTypeHandler()); SqlMapper.AddTypeHandler(typeof(PhysicalAddress), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlTsQuery), new NpgsqlTypeHandler()); + SqlMapper.AddTypeHandler(typeof(NpgsqlTsVector), new NpgsqlTypeHandler()); } private class NpgsqlTypeHandler : SqlMapper.TypeHandler diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 91db06a8..3326f872 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -34012,6 +34012,71 @@ ], "filename": "query.sql" }, + { + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", + "cmd": ":one", + "columns": [ + { + "name": "c_text", + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "text" + }, + "originalName": 
"c_text" + }, + { + "name": "query", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsquery" + }, + "originalName": "query" + }, + { + "name": "tsv", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsvector" + }, + "originalName": "tsv" + }, + { + "name": "rnk", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "real" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 7ebcf1f2..7f4ee26e 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -10573,7 +10573,27 @@ pg_catalogvarcharz c_varchar"e pg_catalogvarcharzc_character_varying"B c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< c_text0         Rpostgres_string_typesbtextzc_text" -cnt0         @bbigint: query.sql╨ +cnt0         @bbigint: query.sqlь +╪WITH txt_query AS ( + SELECT + c_text, + to_tsquery('english', $1) AS query, + to_tsvector('english', c_text) AS tsv + FROM postgres_string_types + WHERE c_text @@ to_tsquery('english', $1) +) + +SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk +FROM txt_query +ORDER BY rnk DESC +LIMIT 1 GetPostgresStringTypesTextSearch:one"0 +c_text0         R  txt_querybtextzc_text"3 +query0         R  txt_queryb tsqueryzquery"0 +tsv0         R  txt_queryb +tsvectorztsv" +rnk0         @breal*%! 
+ +to_tsquery0         btext: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index e91c824a..31b7cb26 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -596,6 +596,62 @@ public async Task TruncatePostgresStringTypes() return null; } + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public readonly record struct GetPostgresStringTypesTextSearchRow(string? CText, NpgsqlTsQuery Query, NpgsqlTsVector Tsv, float Rnk); + public readonly record struct GetPostgresStringTypesTextSearchArgs(string ToTsquery); + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) + { + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesTextSearchRow + { + CText = reader.IsDBNull(0) ? 
null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesTextSearchSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesTextSearchRow + { + CText = reader.IsDBNull(0) ? null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) + }; + } + } + } + + return null; + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index 062fd68c..574eca38 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -34012,6 +34012,71 @@ ], "filename": "query.sql" }, + { + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", + "cmd": ":one", + "columns": [ + { + "name": "c_text", + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "query", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsquery" + }, + "originalName": "query" + }, + { + "name": "tsv", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsvector" + }, + "originalName": "tsv" + }, + { + "name": "rnk", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "real" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 77647b3b..8f4c0007 100644 --- 
a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -10573,7 +10573,27 @@ pg_catalogvarcharz c_varchar"e pg_catalogvarcharzc_character_varying"B c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< c_text0         Rpostgres_string_typesbtextzc_text" -cnt0         @bbigint: query.sql╨ +cnt0         @bbigint: query.sqlь +╪WITH txt_query AS ( + SELECT + c_text, + to_tsquery('english', $1) AS query, + to_tsvector('english', c_text) AS tsv + FROM postgres_string_types + WHERE c_text @@ to_tsquery('english', $1) +) + +SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk +FROM txt_query +ORDER BY rnk DESC +LIMIT 1 GetPostgresStringTypesTextSearch:one"0 +c_text0         R  txt_querybtextzc_text"3 +query0         R  txt_queryb tsqueryzquery"0 +tsv0         R  txt_queryb +tsvectorztsv" +rnk0         @breal*%! + +to_tsquery0         btext: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index f610eb53..ddf48d48 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -718,6 +718,71 @@ public async Task GetPostgresStringTypesCnt() return null; } + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow + { + public string CText { get; set; } + public NpgsqlTsQuery Query { get; set; } + public NpgsqlTsVector Tsv { get; set; } + public float Rnk { get; set; } + }; + public class GetPostgresStringTypesTextSearchArgs + { + public string ToTsquery { get; set; } + }; + public async Task 
GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) + { + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesTextSearchRow + { + CText = reader.IsDBNull(0) ? null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresStringTypesTextSearchSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesTextSearchRow + { + CText = reader.IsDBNull(0) ? 
null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) + }; + } + } + } + + return null; + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index 82002a72..35692c3f 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -34012,6 +34012,71 @@ ], "filename": "query.sql" }, + { + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", + "cmd": ":one", + "columns": [ + { + "name": "c_text", + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" + }, + { + "name": "query", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsquery" + }, + "originalName": "query" + }, + { + "name": "tsv", + "notNull": true, + "length": -1, + "table": { + "name": "txt_query" + }, + "type": { + "name": "tsvector" + }, + "originalName": "tsv" + }, + { + "name": "rnk", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "real" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + } + ], + 
"filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index 292856e1..42a1d0e1 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -10573,7 +10573,27 @@ pg_catalogvarcharz c_varchar"e pg_catalogvarcharzc_character_varying"B c_bpchar0         Rpostgres_string_typesbbpcharzc_bpchar"< c_text0         Rpostgres_string_typesbtextzc_text" -cnt0         @bbigint: query.sql╨ +cnt0         @bbigint: query.sqlь +╪WITH txt_query AS ( + SELECT + c_text, + to_tsquery('english', $1) AS query, + to_tsvector('english', c_text) AS tsv + FROM postgres_string_types + WHERE c_text @@ to_tsquery('english', $1) +) + +SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk +FROM txt_query +ORDER BY rnk DESC +LIMIT 1 GetPostgresStringTypesTextSearch:one"0 +c_text0         R  txt_querybtextzc_text"3 +query0         R  txt_queryb tsqueryzquery"0 +tsv0         R  txt_queryb +tsvectorztsv" +rnk0         @breal*%! 
+ +to_tsquery0         btext: query.sql╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index 92a0ba59..19550a1f 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -218,6 +218,21 @@ GROUP BY c_text LIMIT 1; +-- name: GetPostgresStringTypesTextSearch :one +WITH txt_query AS ( + SELECT + c_text, + to_tsquery('english', $1) AS query, + to_tsvector('english', c_text) AS tsv + FROM postgres_string_types + WHERE c_text @@ to_tsquery('english', $1) +) + +SELECT txt_query.*, ts_rank(tsv, query) AS rnk +FROM txt_query +ORDER BY rnk DESC +LIMIT 1; + /* Unstructured types */ -- name: InsertPostgresUnstructuredTypes :exec diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index a18595e7..01881e1c 100644 --- a/examples/config/postgresql/types/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -41,6 +41,11 @@ CREATE TABLE postgres_string_types ( c_text TEXT ); +CREATE EXTENSION "pg_trgm"; +CREATE EXTENSION "btree_gin"; + +CREATE INDEX postgres_txt_idx ON postgres_string_types USING GIN (c_text); + CREATE TABLE postgres_unstructured_types ( c_json JSON, c_json_string_override JSON, From dddbdbfccbc610d85acc79549f7ee1e42d96f351 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 22 Aug 2025 21:46:18 +0200 Subject: [PATCH 26/33] fix: move test types config to constants --- end2end/EndToEndScaffold/Config.cs | 337 +++++------- .../MySqlConnectorDapperTester.generated.cs | 244 ++++----- .../MySqlConnectorTester.generated.cs | 244 ++++----- .../NpgsqlDapperTester.generated.cs | 482 +++++++++--------- .../EndToEndTests/NpgsqlTester.generated.cs | 482 +++++++++--------- .../MySqlConnectorDapperTester.generated.cs | 244 ++++----- .../MySqlConnectorTester.generated.cs | 244 ++++----- .../NpgsqlDapperTester.generated.cs | 482 +++++++++--------- .../NpgsqlTester.generated.cs 
| 482 +++++++++--------- 9 files changed, 1579 insertions(+), 1662 deletions(-) diff --git a/end2end/EndToEndScaffold/Config.cs b/end2end/EndToEndScaffold/Config.cs index 771ee6bb..1d06ac6b 100644 --- a/end2end/EndToEndScaffold/Config.cs +++ b/end2end/EndToEndScaffold/Config.cs @@ -35,61 +35,158 @@ public enum KnownTestType SqliteTransactionRollback, // Postgres + PostgresTransaction, + PostgresTransactionRollback, + ArrayAsParam, + MultipleArraysAsParams, + PostgresDataTypesOverride, + PostgresInvalidJson, + PostgresInvalidXml, + + // Data types PostgresStringDataTypes, PostgresIntegerDataTypes, PostgresFloatingPointDataTypes, PostgresDateTimeDataTypes, PostgresArrayDataTypes, - PostgresDataTypesOverride, PostgresGuidDataTypes, PostgresFullTextSearchDataTypes, + PostgresNetworkDataTypes, + PostgresGeoDataTypes, + PostgresJsonDataTypes, + PostgresXmlDataTypes, + PostgresEnumDataType, + // :copyfrom (Batch) PostgresStringCopyFrom, - PostgresTransaction, - PostgresTransactionRollback, PostgresIntegerCopyFrom, PostgresFloatingPointCopyFrom, PostgresDateTimeCopyFrom, PostgresGuidCopyFrom, PostgresNetworkCopyFrom, PostgresArrayCopyFrom, - PostgresGeoDataTypes, PostgresGeoCopyFrom, - PostgresNetworkDataTypes, - PostgresJsonDataTypes, - PostgresInvalidJson, - PostgresXmlDataTypes, - PostgresInvalidXml, - PostgresEnumDataType, - - ArrayAsParam, - MultipleArraysAsParams, // MySql - MySqlStringDataTypes, - MySqlIntegerDataTypes, MySqlTransaction, MySqlTransactionRollback, + MySqlDataTypesOverride, + MySqlScopedSchemaEnum, + MySqlInvalidJson, + + // Data types + MySqlStringDataTypes, + MySqlIntegerDataTypes, MySqlFloatingPointDataTypes, MySqlDateTimeDataTypes, MySqlBinaryDataTypes, MySqlEnumDataType, - MySqlDataTypesOverride, - MySqlScopedSchemaEnum, MySqlJsonDataTypes, - MySqlJsonCopyFrom, - MySqlInvalidJson, + // :copyfrom (Batch) MySqlStringCopyFrom, MySqlIntegerCopyFrom, MySqlFloatingPointCopyFrom, MySqlDateTimeCopyFrom, MySqlBinaryCopyFrom, - MySqlEnumCopyFrom + 
MySqlEnumCopyFrom, + MySqlJsonCopyFrom } internal static class Config { + private static readonly SortedSet _mysqlTestTypes = [ + KnownTestType.One, + KnownTestType.Many, + KnownTestType.Exec, + KnownTestType.ExecRows, + KnownTestType.ExecLastId, + KnownTestType.JoinEmbed, + KnownTestType.SelfJoinEmbed, + KnownTestType.Slice, + KnownTestType.MultipleSlices, + KnownTestType.NargNull, + KnownTestType.NargNotNull, + KnownTestType.MySqlStringDataTypes, + KnownTestType.MySqlIntegerDataTypes, + KnownTestType.MySqlTransaction, + KnownTestType.MySqlTransactionRollback, + KnownTestType.MySqlFloatingPointDataTypes, + KnownTestType.MySqlDateTimeDataTypes, + KnownTestType.MySqlBinaryDataTypes, + KnownTestType.MySqlEnumDataType, + KnownTestType.MySqlScopedSchemaEnum, + KnownTestType.MySqlJsonDataTypes, + KnownTestType.MySqlInvalidJson, + KnownTestType.MySqlJsonCopyFrom, + KnownTestType.MySqlDataTypesOverride, + KnownTestType.MySqlStringCopyFrom, + KnownTestType.MySqlIntegerCopyFrom, + KnownTestType.MySqlFloatingPointCopyFrom, + KnownTestType.MySqlDateTimeCopyFrom, + KnownTestType.MySqlBinaryCopyFrom, + KnownTestType.MySqlEnumCopyFrom + ]; + + private static readonly SortedSet _postgresTestTypes = [ + KnownTestType.One, + KnownTestType.Many, + KnownTestType.Exec, + KnownTestType.ExecRows, + KnownTestType.ExecLastId, + KnownTestType.JoinEmbed, + KnownTestType.SelfJoinEmbed, + KnownTestType.ArrayAsParam, + KnownTestType.MultipleArraysAsParams, + KnownTestType.NargNull, + KnownTestType.NargNotNull, + KnownTestType.PostgresTransaction, + KnownTestType.PostgresTransactionRollback, + KnownTestType.PostgresStringDataTypes, + KnownTestType.PostgresIntegerDataTypes, + KnownTestType.PostgresFloatingPointDataTypes, + KnownTestType.PostgresDateTimeDataTypes, + KnownTestType.PostgresGuidDataTypes, + KnownTestType.PostgresArrayDataTypes, + KnownTestType.PostgresGeoDataTypes, + KnownTestType.PostgresGeoCopyFrom, + KnownTestType.PostgresDataTypesOverride, + KnownTestType.PostgresJsonDataTypes, 
+ KnownTestType.PostgresInvalidJson, + KnownTestType.PostgresNetworkDataTypes, + KnownTestType.PostgresXmlDataTypes, + KnownTestType.PostgresInvalidXml, + KnownTestType.PostgresEnumDataType, + KnownTestType.PostgresFullTextSearchDataTypes, + KnownTestType.PostgresStringCopyFrom, + KnownTestType.PostgresIntegerCopyFrom, + KnownTestType.PostgresFloatingPointCopyFrom, + KnownTestType.PostgresDateTimeCopyFrom, + KnownTestType.PostgresGuidCopyFrom, + KnownTestType.PostgresArrayCopyFrom, + KnownTestType.PostgresNetworkCopyFrom + ]; + + private static readonly SortedSet _sqliteTestTypes = [ + KnownTestType.One, + KnownTestType.Many, + KnownTestType.Exec, + KnownTestType.ExecRows, + KnownTestType.ExecLastId, + KnownTestType.JoinEmbed, + KnownTestType.SelfJoinEmbed, + KnownTestType.Slice, + KnownTestType.MultipleSlices, + KnownTestType.NargNull, + KnownTestType.NargNotNull, + KnownTestType.SqliteTransaction, + KnownTestType.SqliteTransactionRollback, + KnownTestType.SqliteDataTypes, + KnownTestType.SqliteCopyFrom, + KnownTestType.SqliteDataTypesOverride, + KnownTestType.SqliteMultipleNamedParam + ]; + public static Dictionary FilesToGenerate { get; } = new() { @@ -98,128 +195,23 @@ internal static class Config { TestNamespace = "MySqlConnectorExampleGen", LegacyTestNamespace = "MySqlConnectorLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.Slice, - KnownTestType.MultipleSlices, - KnownTestType.NargNull, - KnownTestType.NargNotNull, - - KnownTestType.MySqlStringDataTypes, - KnownTestType.MySqlIntegerDataTypes, - KnownTestType.MySqlTransaction, - KnownTestType.MySqlTransactionRollback, - KnownTestType.MySqlFloatingPointDataTypes, - KnownTestType.MySqlDateTimeDataTypes, - KnownTestType.MySqlBinaryDataTypes, - KnownTestType.MySqlEnumDataType, - KnownTestType.MySqlScopedSchemaEnum, - 
KnownTestType.MySqlJsonDataTypes, - KnownTestType.MySqlInvalidJson, - KnownTestType.MySqlJsonCopyFrom, - KnownTestType.MySqlDataTypesOverride, - - KnownTestType.MySqlStringCopyFrom, - KnownTestType.MySqlIntegerCopyFrom, - KnownTestType.MySqlFloatingPointCopyFrom, - KnownTestType.MySqlDateTimeCopyFrom, - KnownTestType.MySqlBinaryCopyFrom, - KnownTestType.MySqlEnumCopyFrom - ] + TestTypes = _mysqlTestTypes } }, { "MySqlConnectorDapperTester", new ClassGenConfig - { - TestNamespace = "MySqlConnectorDapperExampleGen", - LegacyTestNamespace = "MySqlConnectorDapperLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.Slice, - KnownTestType.MultipleSlices, - KnownTestType.NargNull, - KnownTestType.NargNotNull, - - KnownTestType.MySqlStringDataTypes, - KnownTestType.MySqlIntegerDataTypes, - KnownTestType.MySqlTransaction, - KnownTestType.MySqlTransactionRollback, - KnownTestType.MySqlFloatingPointDataTypes, - KnownTestType.MySqlDateTimeDataTypes, - KnownTestType.MySqlBinaryDataTypes, - KnownTestType.MySqlEnumDataType, - KnownTestType.MySqlScopedSchemaEnum, - KnownTestType.MySqlJsonDataTypes, - KnownTestType.MySqlInvalidJson, - KnownTestType.MySqlJsonCopyFrom, - KnownTestType.MySqlDataTypesOverride, - - KnownTestType.MySqlStringCopyFrom, - KnownTestType.MySqlIntegerCopyFrom, - KnownTestType.MySqlFloatingPointCopyFrom, - KnownTestType.MySqlDateTimeCopyFrom, - KnownTestType.MySqlBinaryCopyFrom, - KnownTestType.MySqlEnumCopyFrom - ] - } + { + TestNamespace = "MySqlConnectorDapperExampleGen", + LegacyTestNamespace = "MySqlConnectorDapperLegacyExampleGen", + TestTypes = _mysqlTestTypes + } }, { "NpgsqlTester", new ClassGenConfig { TestNamespace = "NpgsqlExampleGen", LegacyTestNamespace = "NpgsqlLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - 
KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.ArrayAsParam, - KnownTestType.MultipleArraysAsParams, - KnownTestType.NargNull, - KnownTestType.NargNotNull, - - KnownTestType.PostgresTransaction, - KnownTestType.PostgresTransactionRollback, - KnownTestType.PostgresStringDataTypes, - KnownTestType.PostgresIntegerDataTypes, - KnownTestType.PostgresFloatingPointDataTypes, - KnownTestType.PostgresDateTimeDataTypes, - KnownTestType.PostgresGuidDataTypes, - KnownTestType.PostgresArrayDataTypes, - KnownTestType.PostgresGeoDataTypes, - KnownTestType.PostgresGeoCopyFrom, - KnownTestType.PostgresDataTypesOverride, - KnownTestType.PostgresJsonDataTypes, - KnownTestType.PostgresInvalidJson, - KnownTestType.PostgresNetworkDataTypes, - KnownTestType.PostgresXmlDataTypes, - KnownTestType.PostgresInvalidXml, - KnownTestType.PostgresEnumDataType, - KnownTestType.PostgresFullTextSearchDataTypes, - - KnownTestType.PostgresStringCopyFrom, - KnownTestType.PostgresIntegerCopyFrom, - KnownTestType.PostgresFloatingPointCopyFrom, - KnownTestType.PostgresDateTimeCopyFrom, - KnownTestType.PostgresGuidCopyFrom, - KnownTestType.PostgresArrayCopyFrom, - KnownTestType.PostgresNetworkCopyFrom, - ] + TestTypes = _postgresTestTypes } }, { @@ -227,46 +219,7 @@ internal static class Config { TestNamespace = "NpgsqlDapperExampleGen", LegacyTestNamespace = "NpgsqlDapperLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.ArrayAsParam, - KnownTestType.MultipleArraysAsParams, - KnownTestType.NargNull, - KnownTestType.NargNotNull, - KnownTestType.PostgresTransaction, - KnownTestType.PostgresTransactionRollback, - - KnownTestType.PostgresStringDataTypes, - KnownTestType.PostgresIntegerDataTypes, - KnownTestType.PostgresFloatingPointDataTypes, - 
KnownTestType.PostgresDateTimeDataTypes, - KnownTestType.PostgresGuidDataTypes, - KnownTestType.PostgresArrayDataTypes, - KnownTestType.PostgresGeoDataTypes, - KnownTestType.PostgresGeoCopyFrom, - KnownTestType.PostgresDataTypesOverride, - KnownTestType.PostgresJsonDataTypes, - KnownTestType.PostgresInvalidJson, - KnownTestType.PostgresNetworkDataTypes, - KnownTestType.PostgresXmlDataTypes, - KnownTestType.PostgresInvalidXml, - KnownTestType.PostgresEnumDataType, - KnownTestType.PostgresFullTextSearchDataTypes, - - KnownTestType.PostgresStringCopyFrom, - KnownTestType.PostgresIntegerCopyFrom, - KnownTestType.PostgresFloatingPointCopyFrom, - KnownTestType.PostgresDateTimeCopyFrom, - KnownTestType.PostgresGuidCopyFrom, - KnownTestType.PostgresArrayCopyFrom, - KnownTestType.PostgresNetworkCopyFrom - ] + TestTypes = _postgresTestTypes } }, { @@ -274,25 +227,7 @@ internal static class Config { TestNamespace = "SqliteExampleGen", LegacyTestNamespace = "SqliteLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.Slice, - KnownTestType.MultipleSlices, - KnownTestType.NargNull, - KnownTestType.NargNotNull, - KnownTestType.SqliteTransaction, - KnownTestType.SqliteTransactionRollback, - KnownTestType.SqliteDataTypes, - KnownTestType.SqliteCopyFrom, - KnownTestType.SqliteDataTypesOverride, - KnownTestType.SqliteMultipleNamedParam - ] + TestTypes = _sqliteTestTypes } }, { @@ -300,25 +235,7 @@ internal static class Config { TestNamespace = "SqliteDapperExampleGen", LegacyTestNamespace = "SqliteDapperLegacyExampleGen", - TestTypes = [ - KnownTestType.One, - KnownTestType.Many, - KnownTestType.Exec, - KnownTestType.ExecRows, - KnownTestType.ExecLastId, - KnownTestType.JoinEmbed, - KnownTestType.SelfJoinEmbed, - KnownTestType.Slice, - KnownTestType.MultipleSlices, - KnownTestType.NargNull, - 
KnownTestType.NargNotNull, - KnownTestType.SqliteTransaction, - KnownTestType.SqliteTransactionRollback, - KnownTestType.SqliteDataTypes, - KnownTestType.SqliteCopyFrom, - KnownTestType.SqliteDataTypesOverride, - KnownTestType.SqliteMultipleNamedParam - ] + TestTypes = _sqliteTestTypes } }, }; diff --git a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs index a4a8c48d..50f7a0c3 100644 --- a/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorDapperTester.generated.cs @@ -328,6 +328,102 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestMySqlTransaction() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestMySqlTransactionRollback() + { + var connection = new 
MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] + [TestCase(null, null, "1971-01-01 00:00:00")] + public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlFunctionsRow + { + MaxInt = cInt, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetMysqlFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public async Task TestMySqlScopedSchemaEnum() + { + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); + var expected = new QuerySql.GetFirstExtendedBioByTypeRow + 
{ + AuthorName = "Bojack Horseman", + Name = "One Trick Pony", + BioType = BiosBioType.Memoir, + AuthorType = new HashSet + { + BiosAuthorType.Author, + BiosAuthorType.Translator + } + }; + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } + } + + [Test] + public void TestMySqlInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + } + [Test] [TestCase("&", "\u1857", "\u2649", "Sheena is a Punk Rocker", "Holiday in Cambodia", "London's Calling", "London's Burning", "Police & Thieves")] [TestCase(null, null, null, null, null, null, null, null)] @@ -392,46 +488,6 @@ void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlN } } - [Test] - public async Task TestMySqlTransaction() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in 
the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestMySqlTransactionRollback() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(3.4f, -31.555666, 11.098643, 34.4424, 423.2445, 998.9994542, 21.214312452534)] [TestCase(null, null, null, null, null, null, null)] @@ -547,56 +603,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] - [TestCase(null, null, "1971-01-01 00:00:00")] - public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); - await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlFunctionsRow - { - MaxInt = cInt, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetMysqlFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) - { - Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - - [Test] - public async Task TestMySqlScopedSchemaEnum() - { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); - var expected = new QuerySql.GetFirstExtendedBioByTypeRow - { - AuthorName = "Bojack Horseman", - Name = "One Trick Pony", - BioType = BiosBioType.Memoir, - AuthorType = new HashSet - { - BiosAuthorType.Author, - BiosAuthorType.Translator - } - }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); - } - } - [Test] [TestCase("{\"age\": 42, \"name\": \"The Hitchhiker's Guide to the 
Galaxy\"}")] [TestCase(null)] @@ -622,38 +628,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] - [TestCase(10, null)] - public async Task TestJsonCopyFrom(int batchSize, string cJson) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlStringTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlStringTypesCntRow - { - Cnt = batchSize, - CJson = cParsedJson - }; - var actual = await QuerySql.GetMysqlStringTypesCnt(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) - { - Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - } - } - - [Test] - public void TestMySqlInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - } - [Test] [TestCase(100, "D", "\u4321", "\u2345", "Parasite", "Clockwork Orange", "Dr. Strangelove", "Interview with a Vampire", "Memento")] [TestCase(10, null, null, null, null, null, null, null, null)] @@ -850,5 +824,31 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysq Assert.That(x.CSet, Is.EqualTo(y.CSet)); } } + + [Test] + [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] + [TestCase(10, null)] + public async Task TestJsonCopyFrom(int batchSize, string cJson) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow + { + Cnt = batchSize, + CJson = cParsedJson + }; + var actual = await QuerySql.GetMysqlStringTypesCnt(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + } + } } } diff --git a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs index ebe04e3a..d51787d7 100644 --- a/end2end/EndToEndTests/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTests/MySqlConnectorTester.generated.cs @@ -328,6 +328,102 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestMySqlTransaction() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he 
was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestMySqlTransactionRollback() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] + [TestCase(null, null, "1971-01-01 00:00:00")] + public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlFunctionsRow + { + MaxInt = cInt, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetMysqlFunctions(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public async Task TestMySqlScopedSchemaEnum() + { + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); + var expected = new QuerySql.GetFirstExtendedBioByTypeRow + { + AuthorName = "Bojack Horseman", + Name = "One Trick Pony", + BioType = BiosBioType.Memoir, + AuthorType = new HashSet + { + BiosAuthorType.Author, + BiosAuthorType.Translator + } + }; + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } + } + + [Test] + public void TestMySqlInvalidJson() + { + 
Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + } + [Test] [TestCase("&", "\u1857", "\u2649", "Sheena is a Punk Rocker", "Holiday in Cambodia", "London's Calling", "London's Burning", "Police & Thieves")] [TestCase(null, null, null, null, null, null, null, null)] @@ -392,46 +488,6 @@ void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlN } } - [Test] - public async Task TestMySqlTransaction() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestMySqlTransactionRollback() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - 
await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(3.4f, -31.555666, 11.098643, 34.4424, 423.2445, 998.9994542, 21.214312452534)] [TestCase(null, null, null, null, null, null, null)] @@ -547,56 +603,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] - [TestCase(null, null, "1971-01-01 00:00:00")] - public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); - await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlFunctionsRow - { - MaxInt = cInt, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetMysqlFunctions(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) - { - Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - - [Test] - public async Task TestMySqlScopedSchemaEnum() - { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); - var expected = new 
QuerySql.GetFirstExtendedBioByTypeRow - { - AuthorName = "Bojack Horseman", - Name = "One Trick Pony", - BioType = BiosBioType.Memoir, - AuthorType = new HashSet - { - BiosAuthorType.Author, - BiosAuthorType.Translator - } - }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); - } - } - [Test] [TestCase("{\"age\": 42, \"name\": \"The Hitchhiker's Guide to the Galaxy\"}")] [TestCase(null)] @@ -622,38 +628,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] - [TestCase(10, null)] - public async Task TestJsonCopyFrom(int batchSize, string cJson) - { - JsonElement? 
cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlStringTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlStringTypesCntRow - { - Cnt = batchSize, - CJson = cParsedJson - }; - var actual = await QuerySql.GetMysqlStringTypesCnt(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) - { - Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - } - } - - [Test] - public void TestMySqlInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - } - [Test] [TestCase(100, "D", "\u4321", "\u2345", "Parasite", "Clockwork Orange", "Dr. Strangelove", "Interview with a Vampire", "Memento")] [TestCase(10, null, null, null, null, null, null, null, null)] @@ -850,5 +824,31 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysq Assert.That(x.CSet, Is.EqualTo(y.CSet)); } } + + [Test] + [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] + [TestCase(10, null)] + public async Task TestJsonCopyFrom(int batchSize, string cJson) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow + { + Cnt = batchSize, + CJson = cParsedJson + }; + var actual = await QuerySql.GetMysqlStringTypesCnt(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + } + } } } diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index fa6bc1ba..4f3152b2 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -314,6 +314,101 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestPostgresTransaction() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he was 
in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestPostgresTransactionRollback() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var sqlQueryWithTx = QuerySql.WithTransaction(transaction); + await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + public async Task TestArray() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); + ClassicAssert.AreEqual(2, actual.Count); + } + + [Test] + public async Task TestMultipleArrays() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = 
"Only 2 things are infinite, the universe and human stupidity" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); + ClassicAssert.AreEqual(1, actual.Count); + } + + [Test] + [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] + [TestCase(null, null, "1970-01-01 00:00:00")] + public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); + var expected = new QuerySql.GetPostgresFunctionsRow + { + MaxInteger = cInteger, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetPostgresFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public void TestPostgresInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + } + + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await 
QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] [TestCase("E", "It takes a nation of millions to hold us back", "Rebel Without a Pause", "Master of Puppets", "Prophets of Rage")] [TestCase(null, null, null, null, null)] @@ -455,29 +550,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg } } - [Test] - [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] - [TestCase(null, null, "1970-01-01 00:00:00")] - public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); - await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); - var expected = new QuerySql.GetPostgresFunctionsRow - { - MaxInteger = cInteger, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetPostgresFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) - { - Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - private static IEnumerable PostgresGuidDataTypesTestCases { get @@ -527,6 +599,152 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QueryS } } + private static IEnumerable PostgresNetworkDataTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); + yield return new TestCaseData(null, null, null, 
null).SetName("Null Network Data Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] + public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresTypesRow + { + CCidr = cCidr, + CInet = cInet, + CMacaddr = cMacaddr, + CMacaddr8 = cMacaddr8 + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); + Assert.That(x.CInet, Is.EqualTo(y.CInet)); + Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); + Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); + } + } + + private static IEnumerable PostgresGeoTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); + yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresGeoTypesTestCases))] + public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) + { + await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); + var expected = new QuerySql.GetPostgresGeoTypesRow + { + CPoint = cPoint, + CLine = cLine, + CLseg = cLSeg, + CBox = cBox, + CPath = cPath, + CPolygon = cPolygon, + CCircle = cCircle + }; + var actual = await QuerySql.GetPostgresGeoTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) + { + Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); + Assert.That(x.CLine, Is.EqualTo(y.CLine)); + Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); + Assert.That(x.CBox, Is.EqualTo(y.CBox)); + Assert.That(x.CPath, Is.EqualTo(y.CPath)); + Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); + Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); + } + } + + [Test] + [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] + [TestCase(null, null)] + public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CJson = cParsedJson, + CJsonb = cParsedJson, + CJsonStringOverride = cJson, + CJsonpath = cJsonpath + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); + if (x.CJsonb.HasValue) + Assert.That(x.CJsonb.Value.GetRawText(), Is.EqualTo(y.CJsonb.Value.GetRawText())); + Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); + Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); + } + } + + [Test] + [TestCase("Good morning xml, the world says hello")] + [TestCase(null)] + public async Task TestPostgresXmlDataTypes(string cXml) + { + XmlDocument parsedXml = null; + if (cXml != null) + { + parsedXml = new XmlDocument(); + parsedXml.LoadXml(cXml); + } + + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CXml = parsedXml + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, 
Is.EqualTo(y.CXml.OuterXml)); + } + } + + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] @@ -556,46 +774,6 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP } } - [Test] - public async Task TestPostgresTransaction() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, 
Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestPostgresTransactionRollback() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var sqlQueryWithTx = QuerySql.WithTransaction(transaction); - await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(100, true, 3, 453, -1445214231L)] [TestCase(10, null, null, null, null)] @@ -768,44 +946,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo } } - private static IEnumerable PostgresGeoTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); - yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresGeoTypesTestCases))] - public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) - { - await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); - var expected = new QuerySql.GetPostgresGeoTypesRow - { - CPoint = cPoint, - CLine = cLine, - CLseg = cLSeg, - CBox = cBox, - CPath = cPath, - CPolygon = cPolygon, - CCircle = cCircle - }; - var actual = await QuerySql.GetPostgresGeoTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) - { - Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); - Assert.That(x.CLine, Is.EqualTo(y.CLine)); - Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); - Assert.That(x.CBox, Is.EqualTo(y.CBox)); - Assert.That(x.CPath, Is.EqualTo(y.CPath)); - Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); - Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); - } - } - private static IEnumerable PostgresGeoCopyFromTestCases { get @@ -844,145 +984,5 @@ void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgre Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); } } - - private static IEnumerable PostgresNetworkDataTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); - yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] - public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow - { - CCidr = cCidr, - CInet = cInet, - CMacaddr = cMacaddr, - CMacaddr8 = cMacaddr8 - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); - Assert.That(x.CInet, Is.EqualTo(y.CInet)); - Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); - Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); - } - } - - [Test] - [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] - [TestCase(null, null)] - public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CJson = cParsedJson, - CJsonb = cParsedJson, - CJsonStringOverride = cJson, - CJsonpath = cJsonpath - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); - if (x.CJsonb.HasValue) - Assert.That(x.CJsonb.Value.GetRawText(), 
Is.EqualTo(y.CJsonb.Value.GetRawText())); - Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); - Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); - } - } - - [Test] - public void TestPostgresInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); - } - - [Test] - [TestCase("Good morning xml, the world says hello")] - [TestCase(null)] - public async Task TestPostgresXmlDataTypes(string cXml) - { - XmlDocument parsedXml = null; - if (cXml != null) - { - parsedXml = new XmlDocument(); - parsedXml.LoadXml(cXml); - } - - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CXml = parsedXml - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); - if (x.CXml != null) - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); - } - } - - [Test] - public void TestPostgresInvalidXml() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); - } - - [Test] - [TestCase(CEnum.Medium)] - [TestCase(null)] - public async Task TestPostgresStringTypes(CEnum? 
cEnum) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow - { - CEnum = cEnum - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); - } - } - - [Test] - public async Task TestArray() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); - ClassicAssert.AreEqual(2, actual.Count); - } - - [Test] - public async Task TestMultipleArrays() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); - ClassicAssert.AreEqual(1, actual.Count); - } } } diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index cd8435e4..93a2041b 100644 --- 
a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -314,6 +314,101 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestPostgresTransaction() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestPostgresTransactionRollback() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var sqlQueryWithTx = QuerySql.WithTransaction(transaction); + await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = 
"Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + public async Task TestArray() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); + ClassicAssert.AreEqual(2, actual.Count); + } + + [Test] + public async Task TestMultipleArrays() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); + ClassicAssert.AreEqual(1, actual.Count); + } + + [Test] + [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] + [TestCase(null, null, "1970-01-01 00:00:00")] + public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); + var expected = new QuerySql.GetPostgresFunctionsRow + { + MaxInteger = cInteger, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetPostgresFunctions(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public void TestPostgresInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + } + + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] [TestCase("E", "It takes a nation of millions to hold us back", "Rebel Without a Pause", "Master of Puppets", "Prophets of Rage")] [TestCase(null, null, null, null, null)] @@ -455,29 +550,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg } } - [Test] - [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] - [TestCase(null, null, "1970-01-01 00:00:00")] - public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); - await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); - var expected = new QuerySql.GetPostgresFunctionsRow - { - MaxInteger = cInteger, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetPostgresFunctions(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) - { - Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - private static IEnumerable PostgresGuidDataTypesTestCases { get @@ -527,6 +599,152 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QueryS } } + private static IEnumerable PostgresNetworkDataTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); + yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] + public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresTypesRow + { + CCidr = cCidr, + CInet = cInet, + CMacaddr = cMacaddr, + CMacaddr8 = cMacaddr8 + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); + Assert.That(x.CInet, Is.EqualTo(y.CInet)); + Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); + Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); + } + } + + private static IEnumerable PostgresGeoTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); + yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresGeoTypesTestCases))] + public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) + { + await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); + var expected = new QuerySql.GetPostgresGeoTypesRow + { + CPoint = cPoint, + CLine = cLine, + CLseg = cLSeg, + CBox = cBox, + CPath = cPath, + CPolygon = cPolygon, + CCircle = cCircle + }; + var actual = await QuerySql.GetPostgresGeoTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) + { + Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); + Assert.That(x.CLine, Is.EqualTo(y.CLine)); + Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); + Assert.That(x.CBox, Is.EqualTo(y.CBox)); + Assert.That(x.CPath, Is.EqualTo(y.CPath)); + Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); + Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); + } + } + + [Test] + [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] + [TestCase(null, null)] + public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CJson = cParsedJson, + CJsonb = cParsedJson, + CJsonStringOverride = cJson, + CJsonpath = cJsonpath + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); + if (x.CJsonb.HasValue) + Assert.That(x.CJsonb.Value.GetRawText(), Is.EqualTo(y.CJsonb.Value.GetRawText())); + Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); + Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); + } + } + + [Test] + [TestCase("Good morning xml, the world says hello")] + [TestCase(null)] + public async Task TestPostgresXmlDataTypes(string cXml) + { + XmlDocument parsedXml = null; + if (cXml != null) + { + parsedXml = new XmlDocument(); + parsedXml.LoadXml(cXml); + } + + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CXml = parsedXml + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, 
Is.EqualTo(y.CXml.OuterXml)); + } + } + + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual.Value); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] @@ -556,46 +774,6 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP } } - [Test] - public async Task TestPostgresTransaction() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, 
Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestPostgresTransactionRollback() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var sqlQueryWithTx = QuerySql.WithTransaction(transaction); - await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(100, true, 3, 453, -1445214231L)] [TestCase(10, null, null, null, null)] @@ -768,44 +946,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo } } - private static IEnumerable PostgresGeoTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); - yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresGeoTypesTestCases))] - public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) - { - await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); - var expected = new QuerySql.GetPostgresGeoTypesRow - { - CPoint = cPoint, - CLine = cLine, - CLseg = cLSeg, - CBox = cBox, - CPath = cPath, - CPolygon = cPolygon, - CCircle = cCircle - }; - var actual = await QuerySql.GetPostgresGeoTypes(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) - { - Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); - Assert.That(x.CLine, Is.EqualTo(y.CLine)); - Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); - Assert.That(x.CBox, Is.EqualTo(y.CBox)); - Assert.That(x.CPath, Is.EqualTo(y.CPath)); - Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); - Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); - } - } - private static IEnumerable PostgresGeoCopyFromTestCases { get @@ -844,145 +984,5 @@ void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgre Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); } } - - private static IEnumerable PostgresNetworkDataTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); - yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] - public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow - { - CCidr = cCidr, - CInet = cInet, - CMacaddr = cMacaddr, - CMacaddr8 = cMacaddr8 - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); - Assert.That(x.CInet, Is.EqualTo(y.CInet)); - Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); - Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); - } - } - - [Test] - [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] - [TestCase(null, null)] - public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CJson = cParsedJson, - CJsonb = cParsedJson, - CJsonStringOverride = cJson, - CJsonpath = cJsonpath - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); - if (x.CJsonb.HasValue) - Assert.That(x.CJsonb.Value.GetRawText(), 
Is.EqualTo(y.CJsonb.Value.GetRawText())); - Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); - Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); - } - } - - [Test] - public void TestPostgresInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); - } - - [Test] - [TestCase("Good morning xml, the world says hello")] - [TestCase(null)] - public async Task TestPostgresXmlDataTypes(string cXml) - { - XmlDocument parsedXml = null; - if (cXml != null) - { - parsedXml = new XmlDocument(); - parsedXml.LoadXml(cXml); - } - - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CXml = parsedXml - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); - if (x.CXml != null) - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); - } - } - - [Test] - public void TestPostgresInvalidXml() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); - } - - [Test] - [TestCase(CEnum.Medium)] - [TestCase(null)] - public async Task TestPostgresStringTypes(CEnum? 
cEnum) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow - { - CEnum = cEnum - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); - } - } - - [Test] - public async Task TestArray() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); - ClassicAssert.AreEqual(2, actual.Count); - } - - [Test] - public async Task TestMultipleArrays() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); - ClassicAssert.AreEqual(1, actual.Count); - } } } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs 
index c49c6496..b0ad64cd 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorDapperTester.generated.cs @@ -328,6 +328,102 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestMySqlTransaction() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestMySqlTransactionRollback() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + 
var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] + [TestCase(null, null, "1971-01-01 00:00:00")] + public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlFunctionsRow + { + MaxInt = cInt, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetMysqlFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public async Task TestMySqlScopedSchemaEnum() + { + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); + var expected = new QuerySql.GetFirstExtendedBioByTypeRow + { + AuthorName = "Bojack Horseman", + Name = "One Trick Pony", + BioType = BiosBioType.Memoir, + AuthorType = new HashSet + { + BiosAuthorType.Author, + BiosAuthorType.Translator + } + }; + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, 
QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } + } + + [Test] + public void TestMySqlInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + } + [Test] [TestCase("&", "\u1857", "\u2649", "Sheena is a Punk Rocker", "Holiday in Cambodia", "London's Calling", "London's Burning", "Police & Thieves")] [TestCase(null, null, null, null, null, null, null, null)] @@ -392,46 +488,6 @@ void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlN } } - [Test] - public async Task TestMySqlTransaction() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task 
TestMySqlTransactionRollback() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(3.4f, -31.555666, 11.098643, 34.4424, 423.2445, 998.9994542, 21.214312452534)] [TestCase(null, null, null, null, null, null, null)] @@ -547,56 +603,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] - [TestCase(null, null, "1971-01-01 00:00:00")] - public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); - await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlFunctionsRow - { - MaxInt = cInt, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetMysqlFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) - { - Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - - [Test] - public async Task TestMySqlScopedSchemaEnum() - { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); - var expected = new QuerySql.GetFirstExtendedBioByTypeRow - { - AuthorName = "Bojack Horseman", - Name = "One Trick Pony", - BioType = BiosBioType.Memoir, - AuthorType = new HashSet - { - BiosAuthorType.Author, - BiosAuthorType.Translator - } - }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); - } - } - [Test] [TestCase("{\"age\": 42, \"name\": \"The Hitchhiker's Guide to the 
Galaxy\"}")] [TestCase(null)] @@ -622,38 +628,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] - [TestCase(10, null)] - public async Task TestJsonCopyFrom(int batchSize, string cJson) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlStringTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlStringTypesCntRow - { - Cnt = batchSize, - CJson = cParsedJson - }; - var actual = await QuerySql.GetMysqlStringTypesCnt(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) - { - Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - } - } - - [Test] - public void TestMySqlInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - } - [Test] [TestCase(100, "D", "\u4321", "\u2345", "Parasite", "Clockwork Orange", "Dr. Strangelove", "Interview with a Vampire", "Memento")] [TestCase(10, null, null, null, null, null, null, null, null)] @@ -850,5 +824,31 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysq Assert.That(x.CSet, Is.EqualTo(y.CSet)); } } + + [Test] + [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] + [TestCase(10, null)] + public async Task TestJsonCopyFrom(int batchSize, string cJson) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow + { + Cnt = batchSize, + CJson = cParsedJson + }; + var actual = await QuerySql.GetMysqlStringTypesCnt(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + } + } } } diff --git a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs index 54ba9842..3402c78d 100644 --- a/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/MySqlConnectorTester.generated.cs @@ -328,6 +328,102 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestMySqlTransaction() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio 
= "Back in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestMySqlTransactionRollback() + { + var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] + [TestCase(null, null, "1971-01-01 00:00:00")] + public async Task TestMySqlDataTypesOverride(int? 
cInt, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); + await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); + await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); + var expected = new QuerySql.GetMysqlFunctionsRow + { + MaxInt = cInt, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetMysqlFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) + { + Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public async Task TestMySqlScopedSchemaEnum() + { + await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); + var expected = new QuerySql.GetFirstExtendedBioByTypeRow + { + AuthorName = "Bojack Horseman", + Name = "One Trick Pony", + BioType = BiosBioType.Memoir, + AuthorType = new HashSet + { + BiosAuthorType.Author, + BiosAuthorType.Translator + } + }; + var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) + { + Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.BioType, Is.EqualTo(y.BioType)); + Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); + } + } + + [Test] + public void TestMySqlInvalidJson() + { + Assert.ThrowsAsync(async 
() => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + } + [Test] [TestCase("&", "\u1857", "\u2649", "Sheena is a Punk Rocker", "Holiday in Cambodia", "London's Calling", "London's Burning", "Police & Thieves")] [TestCase(null, null, null, null, null, null, null, null)] @@ -392,46 +488,6 @@ void AssertSingularEquals(QuerySql.GetMysqlNumericTypesRow x, QuerySql.GetMysqlN } } - [Test] - public async Task TestMySqlTransaction() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestMySqlTransactionRollback() - { - var connection = new MySqlConnector.MySqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.MySqlConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await 
querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(3.4f, -31.555666, 11.098643, 34.4424, 423.2445, 998.9994542, 21.214312452534)] [TestCase(null, null, null, null, null, null, null)] @@ -547,56 +603,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(-54355, "Scream of the Butterfly", "2025-06-29 12:00:00")] - [TestCase(null, null, "1971-01-01 00:00:00")] - public async Task TestMySqlDataTypesOverride(int? cInt, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertMysqlNumericTypes(new QuerySql.InsertMysqlNumericTypesArgs { CInt = cInt }); - await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CVarchar = cVarchar }); - await QuerySql.InsertMysqlDatetimeTypes(new QuerySql.InsertMysqlDatetimeTypesArgs { CTimestamp = cTimestamp }); - var expected = new QuerySql.GetMysqlFunctionsRow - { - MaxInt = cInt, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetMysqlFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlFunctionsRow x, QuerySql.GetMysqlFunctionsRow y) - { - Assert.That(x.MaxInt, Is.EqualTo(y.MaxInt)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - - [Test] - public async Task TestMySqlScopedSchemaEnum() - { - await this.QuerySql.CreateExtendedBio(new QuerySql.CreateExtendedBioArgs { AuthorName = "Bojack Horseman", Name = "One Trick Pony", BioType = BiosBioType.Memoir, AuthorType = new HashSet { BiosAuthorType.Author, BiosAuthorType.Translator } }); - var expected = new QuerySql.GetFirstExtendedBioByTypeRow - { - 
AuthorName = "Bojack Horseman", - Name = "One Trick Pony", - BioType = BiosBioType.Memoir, - AuthorType = new HashSet - { - BiosAuthorType.Author, - BiosAuthorType.Translator - } - }; - var actual = await this.QuerySql.GetFirstExtendedBioByType(new QuerySql.GetFirstExtendedBioByTypeArgs { BioType = BiosBioType.Memoir }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetFirstExtendedBioByTypeRow x, QuerySql.GetFirstExtendedBioByTypeRow y) - { - Assert.That(x.AuthorName, Is.EqualTo(y.AuthorName)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.BioType, Is.EqualTo(y.BioType)); - Assert.That(x.AuthorType, Is.EqualTo(y.AuthorType)); - } - } - [Test] [TestCase("{\"age\": 42, \"name\": \"The Hitchhiker's Guide to the Galaxy\"}")] [TestCase(null)] @@ -622,38 +628,6 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesRow x, QuerySql.GetMysqlSt } } - [Test] - [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] - [TestCase(10, null)] - public async Task TestJsonCopyFrom(int batchSize, string cJson) - { - JsonElement? 
cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); - await QuerySql.InsertMysqlStringTypesBatch(batchArgs); - var expected = new QuerySql.GetMysqlStringTypesCntRow - { - Cnt = batchSize, - CJson = cParsedJson - }; - var actual = await QuerySql.GetMysqlStringTypesCnt(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) - { - Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - } - } - - [Test] - public void TestMySqlInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertMysqlStringTypes(new QuerySql.InsertMysqlStringTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - } - [Test] [TestCase(100, "D", "\u4321", "\u2345", "Parasite", "Clockwork Orange", "Dr. Strangelove", "Interview with a Vampire", "Memento")] [TestCase(10, null, null, null, null, null, null, null, null)] @@ -850,5 +824,31 @@ void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysq Assert.That(x.CSet, Is.EqualTo(y.CSet)); } } + + [Test] + [TestCase(100, "{\"name\": \"Swordfishtrombones\", \"year\": 1983}")] + [TestCase(10, null)] + public async Task TestJsonCopyFrom(int batchSize, string cJson) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertMysqlStringTypesBatchArgs { CJson = cParsedJson }).ToList(); + await QuerySql.InsertMysqlStringTypesBatch(batchArgs); + var expected = new QuerySql.GetMysqlStringTypesCntRow + { + Cnt = batchSize, + CJson = cParsedJson + }; + var actual = await QuerySql.GetMysqlStringTypesCnt(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetMysqlStringTypesCntRow x, QuerySql.GetMysqlStringTypesCntRow y) + { + Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + } + } } } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 3aa04a00..cebca37c 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -314,6 +314,101 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestPostgresTransaction() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back 
in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestPostgresTransactionRollback() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var sqlQueryWithTx = QuerySql.WithTransaction(transaction); + await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + public async Task TestArray() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); + ClassicAssert.AreEqual(2, actual.Count); + } + + [Test] + public async Task TestMultipleArrays() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert 
Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); + ClassicAssert.AreEqual(1, actual.Count); + } + + [Test] + [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] + [TestCase(null, null, "1970-01-01 00:00:00")] + public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); + var expected = new QuerySql.GetPostgresFunctionsRow + { + MaxInteger = cInteger, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetPostgresFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public void TestPostgresInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + } + + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () 
=> await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] [TestCase("E", "It takes a nation of millions to hold us back", "Rebel Without a Pause", "Master of Puppets", "Prophets of Rage")] [TestCase(null, null, null, null, null)] @@ -455,29 +550,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg } } - [Test] - [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] - [TestCase(null, null, "1970-01-01 00:00:00")] - public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); - await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); - var expected = new QuerySql.GetPostgresFunctionsRow - { - MaxInteger = cInteger, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetPostgresFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) - { - Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - private static IEnumerable PostgresGuidDataTypesTestCases { get @@ -527,6 +599,152 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QueryS } } + private static IEnumerable PostgresNetworkDataTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); + yield return new TestCaseData(null, null, null, 
null).SetName("Null Network Data Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] + public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresTypesRow + { + CCidr = cCidr, + CInet = cInet, + CMacaddr = cMacaddr, + CMacaddr8 = cMacaddr8 + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); + Assert.That(x.CInet, Is.EqualTo(y.CInet)); + Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); + Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); + } + } + + private static IEnumerable PostgresGeoTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); + yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresGeoTypesTestCases))] + public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) + { + await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); + var expected = new QuerySql.GetPostgresGeoTypesRow + { + CPoint = cPoint, + CLine = cLine, + CLseg = cLSeg, + CBox = cBox, + CPath = cPath, + CPolygon = cPolygon, + CCircle = cCircle + }; + var actual = await QuerySql.GetPostgresGeoTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) + { + Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); + Assert.That(x.CLine, Is.EqualTo(y.CLine)); + Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); + Assert.That(x.CBox, Is.EqualTo(y.CBox)); + Assert.That(x.CPath, Is.EqualTo(y.CPath)); + Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); + Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); + } + } + + [Test] + [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] + [TestCase(null, null)] + public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CJson = cParsedJson, + CJsonb = cParsedJson, + CJsonStringOverride = cJson, + CJsonpath = cJsonpath + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); + if (x.CJsonb.HasValue) + Assert.That(x.CJsonb.Value.GetRawText(), Is.EqualTo(y.CJsonb.Value.GetRawText())); + Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); + Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); + } + } + + [Test] + [TestCase("Good morning xml, the world says hello")] + [TestCase(null)] + public async Task TestPostgresXmlDataTypes(string cXml) + { + XmlDocument parsedXml = null; + if (cXml != null) + { + parsedXml = new XmlDocument(); + parsedXml.LoadXml(cXml); + } + + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CXml = parsedXml + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, 
Is.EqualTo(y.CXml.OuterXml)); + } + } + + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] @@ -556,46 +774,6 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP } } - [Test] - public async Task TestPostgresTransaction() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, 
Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestPostgresTransactionRollback() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var sqlQueryWithTx = QuerySql.WithTransaction(transaction); - await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(100, true, 3, 453, -1445214231L)] [TestCase(10, null, null, null, null)] @@ -768,44 +946,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo } } - private static IEnumerable PostgresGeoTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); - yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresGeoTypesTestCases))] - public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) - { - await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); - var expected = new QuerySql.GetPostgresGeoTypesRow - { - CPoint = cPoint, - CLine = cLine, - CLseg = cLSeg, - CBox = cBox, - CPath = cPath, - CPolygon = cPolygon, - CCircle = cCircle - }; - var actual = await QuerySql.GetPostgresGeoTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) - { - Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); - Assert.That(x.CLine, Is.EqualTo(y.CLine)); - Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); - Assert.That(x.CBox, Is.EqualTo(y.CBox)); - Assert.That(x.CPath, Is.EqualTo(y.CPath)); - Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); - Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); - } - } - private static IEnumerable PostgresGeoCopyFromTestCases { get @@ -844,145 +984,5 @@ void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgre Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); } } - - private static IEnumerable PostgresNetworkDataTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); - yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] - public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow - { - CCidr = cCidr, - CInet = cInet, - CMacaddr = cMacaddr, - CMacaddr8 = cMacaddr8 - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); - Assert.That(x.CInet, Is.EqualTo(y.CInet)); - Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); - Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); - } - } - - [Test] - [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] - [TestCase(null, null)] - public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CJson = cParsedJson, - CJsonb = cParsedJson, - CJsonStringOverride = cJson, - CJsonpath = cJsonpath - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); - if (x.CJsonb.HasValue) - Assert.That(x.CJsonb.Value.GetRawText(), 
Is.EqualTo(y.CJsonb.Value.GetRawText())); - Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); - Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); - } - } - - [Test] - public void TestPostgresInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); - } - - [Test] - [TestCase("Good morning xml, the world says hello")] - [TestCase(null)] - public async Task TestPostgresXmlDataTypes(string cXml) - { - XmlDocument parsedXml = null; - if (cXml != null) - { - parsedXml = new XmlDocument(); - parsedXml.LoadXml(cXml); - } - - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CXml = parsedXml - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); - if (x.CXml != null) - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); - } - } - - [Test] - public void TestPostgresInvalidXml() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); - } - - [Test] - [TestCase(CEnum.Medium)] - [TestCase(null)] - public async Task TestPostgresStringTypes(CEnum? 
cEnum) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow - { - CEnum = cEnum - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); - } - } - - [Test] - public async Task TestArray() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); - ClassicAssert.AreEqual(2, actual.Count); - } - - [Test] - public async Task TestMultipleArrays() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); - ClassicAssert.AreEqual(1, actual.Count); - } } } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 22621e87..bc4d7ad9 100644 --- 
a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -314,6 +314,101 @@ void AssertSingularEquals(QuerySql.GetAuthorByNamePatternRow x, QuerySql.GetAuth } } + [Test] + public async Task TestPostgresTransaction() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var querySqlWithTx = QuerySql.WithTransaction(transaction); + await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + await transaction.CommitAsync(); + var expected = new QuerySql.GetAuthorRow + { + Id = 1111, + Name = "Bojack Horseman", + Bio = "Back in the 90s he was in a very famous TV show" + }; + actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) + { + Assert.That(x.Id, Is.EqualTo(y.Id)); + Assert.That(x.Name, Is.EqualTo(y.Name)); + Assert.That(x.Bio, Is.EqualTo(y.Bio)); + } + } + + [Test] + public async Task TestPostgresTransactionRollback() + { + var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); + await connection.OpenAsync(); + var transaction = connection.BeginTransaction(); + var sqlQueryWithTx = QuerySql.WithTransaction(transaction); + await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + await transaction.RollbackAsync(); + var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { 
Name = "Bojack Horseman" }); + ClassicAssert.IsNull(actual); + } + + [Test] + public async Task TestArray() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); + ClassicAssert.AreEqual(2, actual.Count); + } + + [Test] + public async Task TestMultipleArrays() + { + var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); + var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); + var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); + var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); + ClassicAssert.AreEqual(1, actual.Count); + } + + [Test] + [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] + [TestCase(null, null, "1970-01-01 00:00:00")] + public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); + var expected = new QuerySql.GetPostgresFunctionsRow + { + MaxInteger = cInteger, + MaxVarchar = cVarchar, + MaxTimestamp = cTimestamp + }; + var actual = await QuerySql.GetPostgresFunctions(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) + { + Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); + Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); + Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); + } + } + + [Test] + public void TestPostgresInvalidJson() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + } + + [Test] + public void TestPostgresInvalidXml() + { + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + } + [Test] [TestCase("E", "It takes a nation of millions to hold us back", "Rebel Without a Pause", "Master of Puppets", "Prophets of Rage")] [TestCase(null, null, null, null, null)] @@ -455,29 +550,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesRow x, QuerySql.GetPostg } } - [Test] - [TestCase(-54355, "White Light from the Mouth of Infinity", "2022-10-2 15:44:01+09:00")] - [TestCase(null, null, "1970-01-01 00:00:00")] - public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); - await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); - var expected = new QuerySql.GetPostgresFunctionsRow - { - MaxInteger = cInteger, - MaxVarchar = cVarchar, - MaxTimestamp = cTimestamp - }; - var actual = await QuerySql.GetPostgresFunctions(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgresFunctionsRow y) - { - Assert.That(x.MaxInteger, Is.EqualTo(y.MaxInteger)); - Assert.That(x.MaxVarchar, Is.EqualTo(y.MaxVarchar)); - Assert.That(x.MaxTimestamp, Is.EqualTo(y.MaxTimestamp)); - } - } - private static IEnumerable PostgresGuidDataTypesTestCases { get @@ -527,6 +599,152 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesTextSearchRow x, QueryS } } + private static IEnumerable PostgresNetworkDataTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); + yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] + public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresTypesRow + { + CCidr = cCidr, + CInet = cInet, + CMacaddr = cMacaddr, + CMacaddr8 = cMacaddr8 + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); + Assert.That(x.CInet, Is.EqualTo(y.CInet)); + Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); + Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); + } + } + + private static IEnumerable PostgresGeoTypesTestCases + { + get + { + yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); + yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); + } + } + + [Test] + [TestCaseSource(nameof(PostgresGeoTypesTestCases))] + public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) + { + await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); + var expected = new QuerySql.GetPostgresGeoTypesRow + { + CPoint = cPoint, + CLine = cLine, + CLseg = cLSeg, + CBox = cBox, + CPath = cPath, + CPolygon = cPolygon, + CCircle = cCircle + }; + var actual = await QuerySql.GetPostgresGeoTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) + { + Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); + Assert.That(x.CLine, Is.EqualTo(y.CLine)); + Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); + Assert.That(x.CBox, Is.EqualTo(y.CBox)); + Assert.That(x.CPath, Is.EqualTo(y.CPath)); + Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); + Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); + } + } + + [Test] + [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] + [TestCase(null, null)] + public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) + { + JsonElement? 
cParsedJson = null; + if (cJson != null) + cParsedJson = JsonDocument.Parse(cJson).RootElement; + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CJson = cParsedJson, + CJsonb = cParsedJson, + CJsonStringOverride = cJson, + CJsonpath = cJsonpath + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); + if (x.CJson.HasValue) + Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); + Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); + if (x.CJsonb.HasValue) + Assert.That(x.CJsonb.Value.GetRawText(), Is.EqualTo(y.CJsonb.Value.GetRawText())); + Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); + Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); + } + } + + [Test] + [TestCase("Good morning xml, the world says hello")] + [TestCase(null)] + public async Task TestPostgresXmlDataTypes(string cXml) + { + XmlDocument parsedXml = null; + if (cXml != null) + { + parsedXml = new XmlDocument(); + parsedXml.LoadXml(cXml); + } + + await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresUnstructuredTypesRow + { + CXml = parsedXml + }; + var actual = await QuerySql.GetPostgresUnstructuredTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + { + Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); + if (x.CXml != null) + Assert.That(x.CXml.OuterXml, 
Is.EqualTo(y.CXml.OuterXml)); + } + } + + [Test] + [TestCase(CEnum.Medium)] + [TestCase(null)] + public async Task TestPostgresStringTypes(CEnum? cEnum) + { + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresTypesRow + { + CEnum = cEnum + }; + var actual = await QuerySql.GetPostgresTypes(); + AssertSingularEquals(expected, actual); + void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + { + Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); + } + } + [Test] [TestCase(100, "z", "Sex Pistols", "Anarchy in the U.K", "Yoshimi Battles the Pink Robots", "Never Mind the Bollocks...")] [TestCase(10, null, null, null, null, null)] @@ -556,46 +774,6 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP } } - [Test] - public async Task TestPostgresTransaction() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var querySqlWithTx = QuerySql.WithTransaction(transaction); - await querySqlWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - await transaction.CommitAsync(); - var expected = new QuerySql.GetAuthorRow - { - Id = 1111, - Name = "Bojack Horseman", - Bio = "Back in the 90s he was in a very famous TV show" - }; - actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetAuthorRow x, QuerySql.GetAuthorRow y) - { - Assert.That(x.Id, Is.EqualTo(y.Id)); - Assert.That(x.Name, Is.EqualTo(y.Name)); - Assert.That(x.Bio, 
Is.EqualTo(y.Bio)); - } - } - - [Test] - public async Task TestPostgresTransactionRollback() - { - var connection = new Npgsql.NpgsqlConnection(Environment.GetEnvironmentVariable(EndToEndCommon.PostgresConnectionStringEnv)); - await connection.OpenAsync(); - var transaction = connection.BeginTransaction(); - var sqlQueryWithTx = QuerySql.WithTransaction(transaction); - await sqlQueryWithTx.CreateAuthor(new QuerySql.CreateAuthorArgs { Id = 1111, Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - await transaction.RollbackAsync(); - var actual = await QuerySql.GetAuthor(new QuerySql.GetAuthorArgs { Name = "Bojack Horseman" }); - ClassicAssert.IsNull(actual); - } - [Test] [TestCase(100, true, 3, 453, -1445214231L)] [TestCase(10, null, null, null, null)] @@ -768,44 +946,6 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo } } - private static IEnumerable PostgresGeoTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlPoint(1, 2), new NpgsqlLine(3, 4, 5), new NpgsqlLSeg(1, 2, 3, 4), new NpgsqlBox(1, 2, 3, 4), new NpgsqlPath(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlPolygon(new NpgsqlPoint[] { new NpgsqlPoint(1, 2), new NpgsqlPoint(3, 4) }), new NpgsqlCircle(1, 2, 3)).SetName("Valid Geo Types"); - yield return new TestCaseData(null, null, null, null, null, null, null).SetName("Null Geo Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresGeoTypesTestCases))] - public async Task TestPostgresGeoTypes(NpgsqlPoint? cPoint, NpgsqlLine? cLine, NpgsqlLSeg? cLSeg, NpgsqlBox? cBox, NpgsqlPath? cPath, NpgsqlPolygon? cPolygon, NpgsqlCircle? 
cCircle) - { - await QuerySql.InsertPostgresGeoTypes(new QuerySql.InsertPostgresGeoTypesArgs { CPoint = cPoint, CLine = cLine, CLseg = cLSeg, CBox = cBox, CPath = cPath, CPolygon = cPolygon, CCircle = cCircle }); - var expected = new QuerySql.GetPostgresGeoTypesRow - { - CPoint = cPoint, - CLine = cLine, - CLseg = cLSeg, - CBox = cBox, - CPath = cPath, - CPolygon = cPolygon, - CCircle = cCircle - }; - var actual = await QuerySql.GetPostgresGeoTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgresGeoTypesRow y) - { - Assert.That(x.CPoint, Is.EqualTo(y.CPoint)); - Assert.That(x.CLine, Is.EqualTo(y.CLine)); - Assert.That(x.CLseg, Is.EqualTo(y.CLseg)); - Assert.That(x.CBox, Is.EqualTo(y.CBox)); - Assert.That(x.CPath, Is.EqualTo(y.CPath)); - Assert.That(x.CPolygon, Is.EqualTo(y.CPolygon)); - Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); - } - } - private static IEnumerable PostgresGeoCopyFromTestCases { get @@ -844,145 +984,5 @@ void AssertSingularEquals(QuerySql.GetPostgresGeoTypesRow x, QuerySql.GetPostgre Assert.That(x.CCircle, Is.EqualTo(y.CCircle)); } } - - private static IEnumerable PostgresNetworkDataTypesTestCases - { - get - { - yield return new TestCaseData(new NpgsqlCidr("192.168.1.0/24"), new IPAddress(new byte[] { 192, 168, 1, 1 }), new PhysicalAddress(new byte[] { 0x00, 0x1A, 0x2B, 0x3C, 0x4D, 0x5E }), "00:1a:2b:ff:fe:3c:4d:5e").SetName("Valid Network Data Types"); - yield return new TestCaseData(null, null, null, null).SetName("Null Network Data Types"); - } - } - - [Test] - [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] - public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow - { - CCidr = cCidr, - CInet = cInet, - CMacaddr = cMacaddr, - CMacaddr8 = cMacaddr8 - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); - Assert.That(x.CInet, Is.EqualTo(y.CInet)); - Assert.That(x.CMacaddr, Is.EqualTo(y.CMacaddr)); - Assert.That(x.CMacaddr8, Is.EqualTo(y.CMacaddr8)); - } - } - - [Test] - [TestCase("{\"name\": \"Swordfishtrombones\", \"year\": 1983}", "$.\"name\"")] - [TestCase(null, null)] - public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) - { - JsonElement? cParsedJson = null; - if (cJson != null) - cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CJson = cParsedJson, - CJsonb = cParsedJson, - CJsonStringOverride = cJson, - CJsonpath = cJsonpath - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); - if (x.CJson.HasValue) - Assert.That(x.CJson.Value.GetRawText(), Is.EqualTo(y.CJson.Value.GetRawText())); - Assert.That(x.CJsonb.HasValue, Is.EqualTo(y.CJsonb.HasValue)); - if (x.CJsonb.HasValue) - Assert.That(x.CJsonb.Value.GetRawText(), 
Is.EqualTo(y.CJsonb.Value.GetRawText())); - Assert.That(x.CJsonStringOverride, Is.EqualTo(y.CJsonStringOverride)); - Assert.That(x.CJsonpath, Is.EqualTo(y.CJsonpath)); - } - } - - [Test] - public void TestPostgresInvalidJson() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); - } - - [Test] - [TestCase("Good morning xml, the world says hello")] - [TestCase(null)] - public async Task TestPostgresXmlDataTypes(string cXml) - { - XmlDocument parsedXml = null; - if (cXml != null) - { - parsedXml = new XmlDocument(); - parsedXml.LoadXml(cXml); - } - - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow - { - CXml = parsedXml - }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) - { - Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); - if (x.CXml != null) - Assert.That(x.CXml.OuterXml, Is.EqualTo(y.CXml.OuterXml)); - } - } - - [Test] - public void TestPostgresInvalidXml() - { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); - } - - [Test] - [TestCase(CEnum.Medium)] - [TestCase(null)] - public async Task TestPostgresStringTypes(CEnum? 
cEnum) - { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow - { - CEnum = cEnum - }; - var actual = await QuerySql.GetPostgresTypes(); - AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) - { - Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); - } - } - - [Test] - public async Task TestArray() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIds(new QuerySql.GetAuthorsByIdsArgs { LongArr1 = new[] { id1, bojackId } }); - ClassicAssert.AreEqual(2, actual.Count); - } - - [Test] - public async Task TestMultipleArrays() - { - var id1 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Quote that everyone always attribute to Einstein" }); - var id2 = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Albert Einstein", Bio = "Only 2 things are infinite, the universe and human stupidity" }); - var bojackId = await this.QuerySql.CreateAuthorReturnId(new QuerySql.CreateAuthorReturnIdArgs { Name = "Bojack Horseman", Bio = "Back in the 90s he was in a very famous TV show" }); - var actual = await QuerySql.GetAuthorsByIdsAndNames(new QuerySql.GetAuthorsByIdsAndNamesArgs { LongArr1 = new[] { id1, bojackId }, StringArr2 = new[] { "Albert Einstein" } }); - ClassicAssert.AreEqual(1, actual.Count); - } } } From 3e5ac793affb9caea3c0648a170b052ff571c895 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 22 Aug 2025 22:31:38 +0200 Subject: [PATCH 27/33] fix: more 
separate data types queries for Postgres --- end2end/EndToEndScaffold/Config.cs | 40 +- .../Templates/PostgresTests.cs | 43 +- end2end/EndToEndTests/NpgsqlDapperTester.cs | 2 + .../NpgsqlDapperTester.generated.cs | 41 +- end2end/EndToEndTests/NpgsqlTester.cs | 2 + .../EndToEndTests/NpgsqlTester.generated.cs | 41 +- .../EndToEndTestsLegacy/NpgsqlDapperTester.cs | 2 + .../NpgsqlDapperTester.generated.cs | 41 +- end2end/EndToEndTestsLegacy/NpgsqlTester.cs | 2 + .../NpgsqlTester.generated.cs | 41 +- examples/NpgsqlDapperExample/Models.cs | 24 +- examples/NpgsqlDapperExample/QuerySql.cs | 308 ++++- examples/NpgsqlDapperExample/request.json | 1145 +++++++++-------- examples/NpgsqlDapperExample/request.message | 319 ++--- examples/NpgsqlDapperLegacyExample/Models.cs | 24 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 308 ++++- .../NpgsqlDapperLegacyExample/request.json | 1145 +++++++++-------- .../NpgsqlDapperLegacyExample/request.message | 319 ++--- examples/NpgsqlExample/Models.cs | 4 +- examples/NpgsqlExample/QuerySql.cs | 486 +++++-- examples/NpgsqlExample/request.json | 1145 +++++++++-------- examples/NpgsqlExample/request.message | 319 ++--- examples/NpgsqlLegacyExample/Models.cs | 24 +- examples/NpgsqlLegacyExample/QuerySql.cs | 564 ++++++-- examples/NpgsqlLegacyExample/request.json | 1145 +++++++++-------- examples/NpgsqlLegacyExample/request.message | 319 ++--- examples/config/postgresql/types/query.sql | 168 ++- examples/config/postgresql/types/schema.sql | 28 +- 28 files changed, 4898 insertions(+), 3151 deletions(-) diff --git a/end2end/EndToEndScaffold/Config.cs b/end2end/EndToEndScaffold/Config.cs index 1d06ac6b..f87f7d72 100644 --- a/end2end/EndToEndScaffold/Config.cs +++ b/end2end/EndToEndScaffold/Config.cs @@ -96,30 +96,38 @@ public enum KnownTestType internal static class Config { private static readonly SortedSet _mysqlTestTypes = [ + KnownTestType.MySqlTransaction, + KnownTestType.MySqlTransactionRollback, + KnownTestType.MySqlDataTypesOverride, + 
KnownTestType.MySqlScopedSchemaEnum, + KnownTestType.MySqlInvalidJson, + + // query annotations KnownTestType.One, KnownTestType.Many, KnownTestType.Exec, KnownTestType.ExecRows, KnownTestType.ExecLastId, + + // macros KnownTestType.JoinEmbed, KnownTestType.SelfJoinEmbed, KnownTestType.Slice, KnownTestType.MultipleSlices, KnownTestType.NargNull, KnownTestType.NargNotNull, + + // Data types KnownTestType.MySqlStringDataTypes, KnownTestType.MySqlIntegerDataTypes, - KnownTestType.MySqlTransaction, - KnownTestType.MySqlTransactionRollback, KnownTestType.MySqlFloatingPointDataTypes, KnownTestType.MySqlDateTimeDataTypes, KnownTestType.MySqlBinaryDataTypes, KnownTestType.MySqlEnumDataType, - KnownTestType.MySqlScopedSchemaEnum, KnownTestType.MySqlJsonDataTypes, - KnownTestType.MySqlInvalidJson, + + // :copyfrom (Batch) KnownTestType.MySqlJsonCopyFrom, - KnownTestType.MySqlDataTypesOverride, KnownTestType.MySqlStringCopyFrom, KnownTestType.MySqlIntegerCopyFrom, KnownTestType.MySqlFloatingPointCopyFrom, @@ -129,19 +137,27 @@ internal static class Config ]; private static readonly SortedSet _postgresTestTypes = [ + KnownTestType.PostgresTransaction, + KnownTestType.PostgresTransactionRollback, + KnownTestType.ArrayAsParam, + KnownTestType.MultipleArraysAsParams, + KnownTestType.PostgresInvalidJson, + KnownTestType.PostgresInvalidXml, + + // query annotations KnownTestType.One, KnownTestType.Many, KnownTestType.Exec, KnownTestType.ExecRows, KnownTestType.ExecLastId, + + // macros KnownTestType.JoinEmbed, KnownTestType.SelfJoinEmbed, - KnownTestType.ArrayAsParam, - KnownTestType.MultipleArraysAsParams, KnownTestType.NargNull, KnownTestType.NargNotNull, - KnownTestType.PostgresTransaction, - KnownTestType.PostgresTransactionRollback, + + // Data types KnownTestType.PostgresStringDataTypes, KnownTestType.PostgresIntegerDataTypes, KnownTestType.PostgresFloatingPointDataTypes, @@ -149,15 +165,15 @@ internal static class Config KnownTestType.PostgresGuidDataTypes, 
KnownTestType.PostgresArrayDataTypes, KnownTestType.PostgresGeoDataTypes, - KnownTestType.PostgresGeoCopyFrom, KnownTestType.PostgresDataTypesOverride, KnownTestType.PostgresJsonDataTypes, - KnownTestType.PostgresInvalidJson, KnownTestType.PostgresNetworkDataTypes, KnownTestType.PostgresXmlDataTypes, - KnownTestType.PostgresInvalidXml, KnownTestType.PostgresEnumDataType, KnownTestType.PostgresFullTextSearchDataTypes, + + // :copyfrom (Batch) + KnownTestType.PostgresGeoCopyFrom, KnownTestType.PostgresStringCopyFrom, KnownTestType.PostgresIntegerCopyFrom, KnownTestType.PostgresFloatingPointCopyFrom, diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 98a67831..e8a0c760 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -148,7 +148,7 @@ public async Task TestPostgresDateTimeTypes( DateTime? cTimestampWithTz, TimeSpan? cInterval) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CDate = cDate, CTime = cTime, @@ -157,7 +157,7 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CInterval = cInterval }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresDateTimeTypesRow { CDate = cDate, CTime = cTime, @@ -165,10 +165,10 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresDateTimeTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesRow x, QuerySql.GetPostgresDateTimeTypesRow y) { 
Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CTime, Is.EqualTo(y.CTime)); @@ -428,7 +428,7 @@ public async Task TestDateTimeCopyFrom( cTimestampWithTzAsUtc = DateTime.SpecifyKind(cTimestampWithTz.Value, DateTimeKind.Utc); var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresDateTimeTypesBatchArgs { CDate = cDate, CTime = cTime, @@ -437,8 +437,8 @@ public async Task TestDateTimeCopyFrom( CInterval = cInterval }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + await QuerySql.InsertPostgresDateTimeTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresDateTimeTypesCntRow { Cnt = batchSize, CDate = cDate, @@ -447,10 +447,10 @@ public async Task TestDateTimeCopyFrom( CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresDateTimeTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesCntRow x, QuerySql.GetPostgresDateTimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -560,7 +560,7 @@ public async Task TestPostgresNetworkDataTypes( PhysicalAddress cMacaddr, string cMacaddr8) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresNetworkTypes(new QuerySql.InsertPostgresNetworkTypesArgs { CCidr = cCidr, CInet = cInet, @@ -568,7 +568,7 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresNetworkTypesRow { CCidr = cCidr, CInet = cInet, @@ -576,10 +576,10 @@ 
await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CMacaddr8 = cMacaddr8 }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNetworkTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPostgresNetworkTypesRow y) { Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); Assert.That(x.CInet, Is.EqualTo(y.CInet)); @@ -870,7 +870,10 @@ public async Task TestPostgresDataTypesOverride( { await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { - CInteger = cInteger, + CInteger = cInteger + }); + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs + { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs @@ -1055,26 +1058,26 @@ public async Task TestPostgresNetworkCopyFrom( PhysicalAddress cMacaddr) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresNetworkTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); + await QuerySql.InsertPostgresNetworkTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var expected = new QuerySql.GetPostgresNetworkTypesCntRow { Cnt = batchSize, CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNetworkTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, 
QuerySql.GetPostgresNetworkTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); @@ -1115,7 +1118,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy { Impl = $$""" [Test] - [Obsolete] + [Obsolete] // due to NpgsqlTsVector.Parse usage public async Task TestPostgresFullTextSearchDataTypes() { await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.cs b/end2end/EndToEndTests/NpgsqlDapperTester.cs index 9d1a8078..c863d76c 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.cs @@ -16,7 +16,9 @@ public async Task EmptyTestsTable() await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresStringTypes(); + await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); + await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); } diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 4f3152b2..03e712e8 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -378,7 +378,8 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? 
cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { @@ -490,8 +491,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? cTime, DateTime? cTimestamp, DateTime? cTimestampWithTz, TimeSpan? cInterval) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); + var expected = new QuerySql.GetPostgresDateTimeTypesRow { CDate = cDate, CTime = cTime, @@ -499,9 +500,9 @@ public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? 
cTime, Da CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresDateTimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesRow x, QuerySql.GetPostgresDateTimeTypesRow y) { Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CTime, Is.EqualTo(y.CTime)); @@ -577,7 +578,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } [Test] - [Obsolete] + [Obsolete] // due to NpgsqlTsVector.Parse usage public async Task TestPostgresFullTextSearchDataTypes() { await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); @@ -612,17 +613,17 @@ private static IEnumerable PostgresNetworkDataTypesTestCases [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNetworkTypes(new QuerySql.InsertPostgresNetworkTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresNetworkTypesRow { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNetworkTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPostgresNetworkTypesRow y) { Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); Assert.That(x.CInet, Is.EqualTo(y.CInet)); @@ -838,9 +839,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? DateTime? 
cTimestampWithTzAsUtc = null; if (cTimestampWithTz != null) cTimestampWithTzAsUtc = DateTime.SpecifyKind(cTimestampWithTz.Value, DateTimeKind.Utc); - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresDateTimeTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); + await QuerySql.InsertPostgresDateTimeTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresDateTimeTypesCntRow { Cnt = batchSize, CDate = cDate, @@ -849,9 +850,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresDateTimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesCntRow x, QuerySql.GetPostgresDateTimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -904,18 +905,18 @@ private static IEnumerable PostgresNetworkCopyFromTestCases [TestCaseSource(nameof(PostgresNetworkCopyFromTestCases))] public async Task TestPostgresNetworkCopyFrom(int batchSize, NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNetworkTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); + await QuerySql.InsertPostgresNetworkTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNetworkTypesCntRow { Cnt = batchSize, CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNetworkTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.GetPostgresNetworkTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); diff --git a/end2end/EndToEndTests/NpgsqlTester.cs b/end2end/EndToEndTests/NpgsqlTester.cs index 35340c87..522d85b7 100644 --- a/end2end/EndToEndTests/NpgsqlTester.cs +++ b/end2end/EndToEndTests/NpgsqlTester.cs @@ -16,7 +16,9 @@ public async Task EmptyTestsTables() await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresStringTypes(); + await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); + await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); } diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 93a2041b..110e0c7f 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ 
b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -378,7 +378,8 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { @@ -490,8 +491,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? cTime, DateTime? cTimestamp, DateTime? cTimestampWithTz, TimeSpan? cInterval) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); + var expected = new QuerySql.GetPostgresDateTimeTypesRow { CDate = cDate, CTime = cTime, @@ -499,9 +500,9 @@ public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? 
cTime, Da CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresDateTimeTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesRow x, QuerySql.GetPostgresDateTimeTypesRow y) { Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CTime, Is.EqualTo(y.CTime)); @@ -577,7 +578,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } [Test] - [Obsolete] + [Obsolete] // due to NpgsqlTsVector.Parse usage public async Task TestPostgresFullTextSearchDataTypes() { await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); @@ -612,17 +613,17 @@ private static IEnumerable PostgresNetworkDataTypesTestCases [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNetworkTypes(new QuerySql.InsertPostgresNetworkTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresNetworkTypesRow { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNetworkTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPostgresNetworkTypesRow y) { Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); Assert.That(x.CInet, Is.EqualTo(y.CInet)); @@ -838,9 +839,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? DateTime? 
cTimestampWithTzAsUtc = null; if (cTimestampWithTz != null) cTimestampWithTzAsUtc = DateTime.SpecifyKind(cTimestampWithTz.Value, DateTimeKind.Utc); - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresDateTimeTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); + await QuerySql.InsertPostgresDateTimeTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresDateTimeTypesCntRow { Cnt = batchSize, CDate = cDate, @@ -849,9 +850,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresDateTimeTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesCntRow x, QuerySql.GetPostgresDateTimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -904,18 +905,18 @@ private static IEnumerable PostgresNetworkCopyFromTestCases [TestCaseSource(nameof(PostgresNetworkCopyFromTestCases))] public async Task TestPostgresNetworkCopyFrom(int batchSize, NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNetworkTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); + await QuerySql.InsertPostgresNetworkTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNetworkTypesCntRow { Cnt = batchSize, CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNetworkTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.GetPostgresNetworkTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs index 05ed0055..a5dbdc0a 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs @@ -16,7 +16,9 @@ public async Task EmptyTestsTable() await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresStringTypes(); + await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); + await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index cebca37c..37035592 100644 --- 
a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -378,7 +378,8 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { @@ -490,8 +491,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? cTime, DateTime? cTimestamp, DateTime? cTimestampWithTz, TimeSpan? cInterval) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); + var expected = new QuerySql.GetPostgresDateTimeTypesRow { CDate = cDate, CTime = cTime, @@ -499,9 +500,9 @@ public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? 
cTime, Da CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresDateTimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesRow x, QuerySql.GetPostgresDateTimeTypesRow y) { Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CTime, Is.EqualTo(y.CTime)); @@ -577,7 +578,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } [Test] - [Obsolete] + [Obsolete] // due to NpgsqlTsVector.Parse usage public async Task TestPostgresFullTextSearchDataTypes() { await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); @@ -612,17 +613,17 @@ private static IEnumerable PostgresNetworkDataTypesTestCases [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNetworkTypes(new QuerySql.InsertPostgresNetworkTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresNetworkTypesRow { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNetworkTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPostgresNetworkTypesRow y) { Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); Assert.That(x.CInet, Is.EqualTo(y.CInet)); @@ -838,9 +839,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? DateTime? 
cTimestampWithTzAsUtc = null; if (cTimestampWithTz != null) cTimestampWithTzAsUtc = DateTime.SpecifyKind(cTimestampWithTz.Value, DateTimeKind.Utc); - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresDateTimeTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); + await QuerySql.InsertPostgresDateTimeTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresDateTimeTypesCntRow { Cnt = batchSize, CDate = cDate, @@ -849,9 +850,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresDateTimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesCntRow x, QuerySql.GetPostgresDateTimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -904,18 +905,18 @@ private static IEnumerable PostgresNetworkCopyFromTestCases [TestCaseSource(nameof(PostgresNetworkCopyFromTestCases))] public async Task TestPostgresNetworkCopyFrom(int batchSize, NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNetworkTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); + await QuerySql.InsertPostgresNetworkTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNetworkTypesCntRow { Cnt = batchSize, CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNetworkTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.GetPostgresNetworkTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs index 6bd03fbd..434ad572 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs @@ -16,7 +16,9 @@ public async Task EmptyTestsTable() await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresStringTypes(); + await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); + await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); await QuerySql.TruncatePostgresUnstructuredTypes(); } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index bc4d7ad9..2764af55 100644 --- 
a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -378,7 +378,8 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger, CTimestamp = cTimestamp }); + await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow { @@ -490,8 +491,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? cTime, DateTime? cTimestamp, DateTime? cTimestampWithTz, TimeSpan? cInterval) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }); + var expected = new QuerySql.GetPostgresDateTimeTypesRow { CDate = cDate, CTime = cTime, @@ -499,9 +500,9 @@ public async Task TestPostgresDateTimeTypes(DateTime? cDate, TimeSpan? 
cTime, Da CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresDateTimeTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesRow x, QuerySql.GetPostgresDateTimeTypesRow y) { Assert.That(x.CDate, Is.EqualTo(y.CDate)); Assert.That(x.CTime, Is.EqualTo(y.CTime)); @@ -577,7 +578,7 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy } [Test] - [Obsolete] + [Obsolete] // due to NpgsqlTsVector.Parse usage public async Task TestPostgresFullTextSearchDataTypes() { await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CText = "Hello world" }); @@ -612,17 +613,17 @@ private static IEnumerable PostgresNetworkDataTypesTestCases [TestCaseSource(nameof(PostgresNetworkDataTypesTestCases))] public async Task TestPostgresNetworkDataTypes(NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr, string cMacaddr8) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNetworkTypes(new QuerySql.InsertPostgresNetworkTypesArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }); + var expected = new QuerySql.GetPostgresNetworkTypesRow { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr, CMacaddr8 = cMacaddr8 }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNetworkTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPostgresNetworkTypesRow y) { Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); Assert.That(x.CInet, Is.EqualTo(y.CInet)); @@ -838,9 +839,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? DateTime? 
cTimestampWithTzAsUtc = null; if (cTimestampWithTz != null) cTimestampWithTzAsUtc = DateTime.SpecifyKind(cTimestampWithTz.Value, DateTimeKind.Utc); - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresDateTimeTypesBatchArgs { CDate = cDate, CTime = cTime, CTimestamp = cTimestamp, CTimestampWithTz = cTimestampWithTzAsUtc, CInterval = cInterval }).ToList(); + await QuerySql.InsertPostgresDateTimeTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresDateTimeTypesCntRow { Cnt = batchSize, CDate = cDate, @@ -849,9 +850,9 @@ public async Task TestDateTimeCopyFrom(int batchSize, DateTime? cDate, TimeSpan? CTimestampWithTz = cTimestampWithTz, CInterval = cInterval }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresDateTimeTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresDateTimeTypesCntRow x, QuerySql.GetPostgresDateTimeTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CDate, Is.EqualTo(y.CDate)); @@ -904,18 +905,18 @@ private static IEnumerable PostgresNetworkCopyFromTestCases [TestCaseSource(nameof(PostgresNetworkCopyFromTestCases))] public async Task TestPostgresNetworkCopyFrom(int batchSize, NpgsqlCidr? 
cCidr, IPAddress cInet, PhysicalAddress cMacaddr) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNetworkTypesBatchArgs { CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }).ToList(); + await QuerySql.InsertPostgresNetworkTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNetworkTypesCntRow { Cnt = batchSize, CCidr = cCidr, CInet = cInet, CMacaddr = cMacaddr }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNetworkTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.GetPostgresNetworkTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CCidr, Is.EqualTo(y.CCidr)); diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index d3526aa5..fa7de08a 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -21,15 +21,6 @@ public class PostgresType public float? CReal { get; init; } public double? CDoublePrecision { get; init; } public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } public Guid? CUuid { get; init; } public CEnum? 
CEnum { get; init; } }; @@ -41,6 +32,21 @@ public class PostgresStringType public string? CBpchar { get; init; } public string? CText { get; init; } }; +public class PostgresDatetimeType +{ + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? CInterval { get; init; } +}; +public class PostgresNetworkType +{ + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? CMacaddr8 { get; init; } +}; public class PostgresUnstructuredType { public JsonElement? CJson { get; init; } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index f19fddf0..70322a46 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -43,7 +43,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? 
ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; init; } @@ -56,17 +56,8 @@ public class InsertPostgresTypesArgs public decimal? CDecimal { get; init; } public double? CDoublePrecision { get; init; } public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } public Guid? CUuid { get; init; } public CEnum? CEnum { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? 
CMacaddr8 { get; init; } }; public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { @@ -81,17 +72,8 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_decimal", args.CDecimal); queryParams.Add("c_double_precision", args.CDoublePrecision); queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -104,7 +86,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? CBoolean { get; init; } @@ -116,15 +98,7 @@ public class InsertPostgresTypesBatchArgs public decimal? CDecimal { get; init; } public double? CDoublePrecision { get; init; } public decimal? CMoney { get; init; } - public DateTime? 
CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } public Guid? CUuid { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } }; public async Task InsertPostgresTypesBatch(List args) { @@ -145,15 +119,7 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CDecimal); await writer.WriteAsync(row.CDoublePrecision); await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); await writer.WriteAsync(row.CUuid); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); } await writer.CompleteAsync(); @@ -163,7 +129,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; init; } @@ -176,17 +142,8 @@ public class GetPostgresTypesRow public decimal? CDecimal { get; init; } public double? CDoublePrecision { get; init; } public decimal? CMoney { get; init; } - public DateTime? 
CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } public Guid? CUuid { get; init; } public CEnum? CEnum { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } }; public async Task GetPostgresTypes() { @@ -204,7 +161,7 @@ public class GetPostgresTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; init; } @@ -216,15 +173,7 @@ public class GetPostgresTypesCntRow public decimal? CDecimal { get; init; } public double? CDoublePrecision { get; init; } public decimal? CMoney { get; init; } - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } public Guid? 
CUuid { get; init; } - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } public required long Cnt { get; init; } }; public async Task GetPostgresTypesCnt() @@ -243,7 +192,7 @@ public class GetPostgresTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; init; } @@ -439,6 +388,253 @@ public class GetPostgresStringTypesTextSearchArgs return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); } + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs + { + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } + }; + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); + } + + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow + { + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } + }; + public async Task GetPostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow + { + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } + public required long Cnt { get; init; } + }; + public async Task GetPostgresDateTimeTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs + { + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } + }; + public async Task InsertPostgresDateTimeTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs + { + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? 
CMacaddr8 { get; init; } + }; + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); + } + + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow + { + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? 
CMacaddr8 { get; init; } + }; + public async Task GetPostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow + { + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? 
CMacaddr { get; init; } + public required long Cnt { get; init; } + }; + public async Task GetPostgresNetworkTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs + { + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? 
CMacaddr { get; init; } + }; + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index 67007e5a..fe14429b 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -140,177 +140,191 @@ } }, { - "name": "c_date", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "date" + "name": "uuid" } }, { - "name": "c_time", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "time" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_timestamp", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "bpchar" } }, { - "name": "c_timestamp_with_tz", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", 
- "name": "timestamptz" + "name": "varchar" } }, { - "name": "c_interval", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "varchar" } }, { - "name": "c_cidr", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "bpchar" } }, { - "name": "c_inet", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "postgres_datetime_types" + }, + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" } }, { - "name": "c_macaddr", + "name": "c_time", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "time" } }, { - "name": "c_macaddr8", + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "timestamp" } }, { - "name": "c_uuid", + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "timestamptz" } }, { - "name": "c_enum", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "interval" } } ] }, { "rel": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "columns": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - } - }, - { - 
"name": "c_varchar", + "name": "c_cidr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "cidr" } }, { - "name": "c_character_varying", + "name": "c_inet", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_bpchar", + "name": "c_macaddr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "bpchar" + "name": "macaddr" } }, { - "name": "c_text", + "name": "c_macaddr8", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr8" } } ] @@ -32561,7 +32575,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32727,86 +32741,6 @@ }, { "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - 
"number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, "column": { "name": "c_uuid", "length": -1, @@ -32822,7 +32756,7 @@ } }, { - "number": 17, + "number": 12, "column": { "name": "c_enum", "length": -1, @@ -32830,58 +32764,6 @@ "name": "c_enum" } } - }, - { - "number": 18, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 19, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 20, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 21, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } } ], "comments": [ @@ -32893,7 +32775,7 @@ } }, { - "text": 
"INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33034,81 +32916,6 @@ }, { "number": 10, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 11, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 12, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 14, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 15, "column": { "name": "c_uuid", "length": -1, @@ -33121,51 +32928,6 @@ 
}, "originalName": "c_uuid" } - }, - { - "number": 16, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 17, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 18, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } } ], "filename": "query.sql", @@ -33174,7 +32936,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33297,65 +33059,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - 
"type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33377,53 +33080,12 @@ "name": "c_enum" }, "originalName": "c_enum" - }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, - { - "name": "c_macaddr8", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33534,65 +33196,6 @@ }, "originalName": 
"c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33604,39 +33207,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, { "name": "cnt", "notNull": true, @@ -33650,7 +33220,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", "name": "GetPostgresFunctions", "cmd": 
":one", "columns": [ @@ -34077,6 +33647,563 @@ ], "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "comments": [ + " DateTime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + 
"name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": 
"timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "\nINSERT INTO postgres_network_types\n(\n 
c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + } + }, + { + "number": 4, + "column": { + "name": "c_macaddr8", + "length": -1, + "type": { + "name": "macaddr8" + } + } + } + ], + "comments": [ + " Network types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE 
TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n 
c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 41e914a3..48d5109e 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunnerЎъ public"╕publicч +./dist/LocalRunner■ы public"└public▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -25,22 +25,6 @@ pg_catalogfloat4G c_double_precision0         Rpostgres_typesb pg_catalogfloat8/ c_money0         Rpostgres_typesbmoney- -c_date0         Rpostgres_typesbdate9 -c_time0         Rpostgres_typesb -pg_catalogtimeC - c_timestamp0         Rpostgres_typesb -pg_catalog timestampM -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzA - -c_interval0         Rpostgres_typesb -pg_cataloginterval- -c_cidr0         Rpostgres_typesbcidr- 
-c_inet0         Rpostgres_typesbinet3 - c_macaddr0         Rpostgres_typesb macaddr5 - -c_macaddr80         Rpostgres_typesb -macaddr8- c_uuid0         Rpostgres_typesbuuid/ c_enum0         Rpostgres_typesbc_enumч postgres_string_typesB @@ -51,7 +35,25 @@ pg_catalogvarcharP c_character_varying0         Rpostgres_string_typesb pg_catalogvarchar8 c_bpchar0         Rpostgres_string_typesbbpchar4 -c_text0         Rpostgres_string_typesbtextн +c_text0         Rpostgres_string_typesbtextЙ +postgres_datetime_types6 +c_date0         Rpostgres_datetime_typesbdateB +c_time0         Rpostgres_datetime_typesb +pg_catalogtimeL + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampV +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzJ + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalД +postgres_network_types5 +c_cidr0         Rpostgres_network_typesbcidr5 +c_inet0         Rpostgres_network_typesbinet; + c_macaddr0         Rpostgres_network_typesb macaddr= + +c_macaddr80         Rpostgres_network_typesb +macaddr8н postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10219,8 +10221,9 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╫ -о +bio_type Autobiography BiographyMemoir╧ + +┴ INSERT INTO postgres_types ( c_boolean, @@ -10233,17 +10236,8 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8 + c_enum ) VALUES ( $1, @@ -10257,16 +10251,7 @@ VALUES ( $9, $10, $11, - $12, - $13, - $14, - $15, - $16, - $17::c_enum, - $18, - $19, - $20::macaddr, - $21::macaddr8 + $12::c_enum )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz 
c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10281,22 +10266,9 @@ c_smallint*TP c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F B c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? -c_date0         8Rpublicpostgres_typesbdatezc_date*N J -c_time0         8Rpublicpostgres_typesbpg_catalog.timezc_time*] Y - c_timestamp0         8Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*ok -c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV - -c_interval0         8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# - -c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesю -сINSERT INTO postgres_types +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  
+c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ +ШINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10307,15 +10279,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid ) VALUES ( $1, @@ -10327,15 +10291,7 @@ VALUES ( $7, $8, $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - $17, - $18 + $10 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10349,18 +10305,8 @@ c_smallint*RN c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ c_money0         Rpublicpostgres_typesbmoneyzc_money*A = -c_date0         Rpublicpostgres_typesbdatezc_date*L H -c_time0         Rpublicpostgres_typesbpg_catalog.timezc_time*[ W - c_timestamp0         Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*m i -c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT - -c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ -ЄSELECT +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ +╒SELECT c_boolean, c_bit, c_smallint, @@ -10371,17 +10317,8 @@ c_interval*A= c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8::TEXT AS c_macaddr8 + c_enum FROM postgres_types LIMIT 1GetPostgresTypes:one"G c_boolean0         Rpostgres_typesb @@ -10405,25 +10342,9 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         
Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! - -c_macaddr80         btext: query.sqlг -▐SELECT +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД +ьSELECT c_smallint, c_boolean, c_integer, @@ -10433,15 +10354,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_cidr, - c_inet, - c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY @@ -10454,15 +10367,7 @@ GROUP BY c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid LIMIT 1GetPostgresTypesCnt:one"I c_smallint0         Rpostgres_typesb @@ -10483,28 +10388,15 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         
Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql╤ -зSELECT +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sqlЇ +╩SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types -CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( +CROSS JOIN postgres_string_types +CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b @@ -10593,7 +10485,134 @@ LIMIT 1 GetPostgresStringTypesTextSearch:one"0 tsvectorztsv" rnk0         @breal*%! -to_tsquery0         btext: query.sql╨ +to_tsquery0         btext: query.sql╪ +Х +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5)InsertPostgresDateTimeTypes:exec*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval2 DateTime types : query.sqlBpostgres_datetime_types┴ +hSELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1GetPostgresDateTimeTypes:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval: query.sqlY +&TRUNCATE TABLE postgres_datetime_typesTruncatePostgresDateTimeTypes:exec: query.sqlш +ыSELECT + c_date, 
+ c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval, + COUNT(*) AS cnt +FROM postgres_datetime_types +GROUP BY + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +LIMIT 1GetPostgresDateTimeTypesCnt:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval" +cnt0         @bbigint: query.sql╬ +ФINSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5) InsertPostgresDateTimeTypesBatch :copyfrom*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval: query.sqlBpostgres_datetime_typesЙ +Ф +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr, + c_macaddr8 +) VALUES ( + $1, + $2, + $3, + $4::macaddr8 +)InsertPostgresNetworkTypes:exec*KG +c_cidr0         8R publicpostgres_network_typesbcidrzc_cidr*KG +c_inet0         8R publicpostgres_network_typesbinetzc_inet*TP + c_macaddr0         8R publicpostgres_network_typesb macaddrz c_macaddr*'# + +c_macaddr80         b +macaddr82 Network types : query.sqlBpostgres_network_typesЙ +tSELECT + c_cidr, + c_inet, + c_macaddr, + c_macaddr8::TEXT AS c_macaddr8 +FROM postgres_network_types +LIMIT 1GetPostgresNetworkTypes:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         
Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr"! + +c_macaddr80         btext: query.sqlW +%TRUNCATE TABLE postgres_network_typesTruncatePostgresNetworkTypes:exec: query.sqlк +ФSELECT + c_cidr, + c_inet, + c_macaddr, + COUNT(*) AS cnt +FROM postgres_network_types +GROUP BY + c_cidr, + c_inet, + c_macaddr +LIMIT 1GetPostgresNetworkTypesCnt:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr" +cnt0         @bbigint: query.sqlЭ +`INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr +) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE +c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE +c_inet0         R publicpostgres_network_typesbinetzc_inet*RN + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index a6bffae3..b7d14594 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -22,15 +22,6 @@ public class PostgresType public float? CReal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } public CEnum? 
CEnum { get; set; } }; @@ -42,6 +33,21 @@ public class PostgresStringType public string CBpchar { get; set; } public string CText { get; set; } }; + public class PostgresDatetimeType + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + }; + public class PostgresNetworkType + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; public class PostgresUnstructuredType { public JsonElement? CJson { get; set; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 5416b75e..75b0c343 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -44,7 +44,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, 
@c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -57,17 +57,8 @@ public class InsertPostgresTypesArgs public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } }; public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { @@ -82,17 +73,8 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) queryParams.Add("c_decimal", args.CDecimal); queryParams.Add("c_double_precision", args.CDoublePrecision); queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : null); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -105,7 +87,7 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? CBoolean { get; set; } @@ -117,15 +99,7 @@ public class InsertPostgresTypesBatchArgs public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } }; public async Task InsertPostgresTypesBatch(List args) { @@ -146,15 +120,7 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CDecimal); await writer.WriteAsync(row.CDoublePrecision); await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); await writer.WriteAsync(row.CUuid); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); } await writer.CompleteAsync(); @@ -164,7 +130,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -177,17 +143,8 @@ public class GetPostgresTypesRow public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } }; public async Task GetPostgresTypes() { @@ -205,7 +162,7 @@ public async Task GetPostgresTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; set; } @@ -217,15 +174,7 @@ public class GetPostgresTypesCntRow public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } - public NpgsqlCidr? 
CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } public long Cnt { get; set; } }; public async Task GetPostgresTypesCnt() @@ -244,7 +193,7 @@ public async Task GetPostgresTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -440,6 +389,253 @@ public async Task GetPostgresStringTypesTex return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); } + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); + } + + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + }; + public async Task GetPostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresDateTimeTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); + } + + private 
const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; + public async Task GetPostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow + { + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresNetworkTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + }; + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class 
InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 3326f872..2fe3fd91 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -140,177 +140,191 @@ } }, { - "name": "c_date", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "date" + "name": "uuid" } }, { - "name": "c_time", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "time" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_timestamp", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "bpchar" } }, { - "name": "c_timestamp_with_tz", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "varchar" } }, { - "name": "c_interval", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "varchar" } }, { - "name": "c_cidr", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "bpchar" } }, { - "name": "c_inet", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "postgres_datetime_types" + }, + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" } }, { - "name": "c_macaddr", + 
"name": "c_time", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "time" } }, { - "name": "c_macaddr8", + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "timestamp" } }, { - "name": "c_uuid", + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "timestamptz" } }, { - "name": "c_enum", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "interval" } } ] }, { "rel": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "columns": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - } - }, - { - "name": "c_varchar", + "name": "c_cidr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "cidr" } }, { - "name": "c_character_varying", + "name": "c_inet", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_bpchar", + "name": "c_macaddr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "bpchar" + "name": "macaddr" } }, { - "name": "c_text", + "name": "c_macaddr8", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr8" } } ] @@ -32561,7 +32575,7 @@ }, "queries": 
[ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32727,86 +32741,6 @@ }, { "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - 
"schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, "column": { "name": "c_uuid", "length": -1, @@ -32822,7 +32756,7 @@ } }, { - "number": 17, + "number": 12, "column": { "name": "c_enum", "length": -1, @@ -32830,58 +32764,6 @@ "name": "c_enum" } } - }, - { - "number": 18, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 19, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 20, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 21, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } } ], "comments": [ @@ -32893,7 +32775,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33034,81 +32916,6 @@ }, { "number": 10, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - 
"name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 11, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 12, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 14, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 15, "column": { "name": "c_uuid", "length": -1, @@ -33121,51 +32928,6 @@ }, "originalName": "c_uuid" } - }, - { - "number": 16, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 17, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 18, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } } ], "filename": "query.sql", @@ -33174,7 +32936,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n 
c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33297,65 +33059,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33377,53 +33080,12 @@ "name": "c_enum" }, "originalName": "c_enum" - }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, - { - 
"name": "c_macaddr8", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33534,65 +33196,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": 
"interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33604,39 +33207,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, { "name": "cnt", "notNull": true, @@ -33650,7 +33220,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -34077,6 +33647,563 @@ ], "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": 
"postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "comments": [ + " DateTime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE 
postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + 
}, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": 
"c_macaddr" + } + }, + { + "number": 4, + "column": { + "name": "c_macaddr8", + "length": -1, + "type": { + "name": "macaddr8" + } + } + } + ], + "comments": [ + " Network types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, 
+ "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 7f4ee26e..e5e217d6 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbЁ 
"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunnerЎъ public"╕publicч +./dist/LocalRunner■ы public"└public▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -25,22 +25,6 @@ pg_catalogfloat4G c_double_precision0         Rpostgres_typesb pg_catalogfloat8/ c_money0         Rpostgres_typesbmoney- -c_date0         Rpostgres_typesbdate9 -c_time0         Rpostgres_typesb -pg_catalogtimeC - c_timestamp0         Rpostgres_typesb -pg_catalog timestampM -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzA - -c_interval0         Rpostgres_typesb -pg_cataloginterval- -c_cidr0         Rpostgres_typesbcidr- -c_inet0         Rpostgres_typesbinet3 - c_macaddr0         Rpostgres_typesb macaddr5 - -c_macaddr80         Rpostgres_typesb -macaddr8- c_uuid0         Rpostgres_typesbuuid/ c_enum0         Rpostgres_typesbc_enumч postgres_string_typesB @@ -51,7 +35,25 @@ pg_catalogvarcharP c_character_varying0         Rpostgres_string_typesb pg_catalogvarchar8 c_bpchar0         Rpostgres_string_typesbbpchar4 -c_text0         Rpostgres_string_typesbtextн +c_text0         Rpostgres_string_typesbtextЙ +postgres_datetime_types6 +c_date0         Rpostgres_datetime_typesbdateB +c_time0         Rpostgres_datetime_typesb +pg_catalogtimeL + c_timestamp0         
Rpostgres_datetime_typesb +pg_catalog timestampV +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzJ + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalД +postgres_network_types5 +c_cidr0         Rpostgres_network_typesbcidr5 +c_inet0         Rpostgres_network_typesbinet; + c_macaddr0         Rpostgres_network_typesb macaddr= + +c_macaddr80         Rpostgres_network_typesb +macaddr8н postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10219,8 +10221,9 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╫ -о +bio_type Autobiography BiographyMemoir╧ + +┴ INSERT INTO postgres_types ( c_boolean, @@ -10233,17 +10236,8 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8 + c_enum ) VALUES ( $1, @@ -10257,16 +10251,7 @@ VALUES ( $9, $10, $11, - $12, - $13, - $14, - $15, - $16, - $17::c_enum, - $18, - $19, - $20::macaddr, - $21::macaddr8 + $12::c_enum )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10281,22 +10266,9 @@ c_smallint*TP c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F B c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? 
-c_date0         8Rpublicpostgres_typesbdatezc_date*N J -c_time0         8Rpublicpostgres_typesbpg_catalog.timezc_time*] Y - c_timestamp0         8Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*ok -c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV - -c_interval0         8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# - -c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesю -сINSERT INTO postgres_types +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  +c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ +ШINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10307,15 +10279,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid ) VALUES ( $1, @@ -10327,15 +10291,7 @@ VALUES ( $7, $8, $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - $17, - $18 + $10 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10349,18 +10305,8 @@ c_smallint*RN c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ c_money0         Rpublicpostgres_typesbmoneyzc_money*A = -c_date0         Rpublicpostgres_typesbdatezc_date*L H -c_time0         Rpublicpostgres_typesbpg_catalog.timezc_time*[ W - c_timestamp0         Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*m i -c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT - -c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         
Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ -ЄSELECT +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ +╒SELECT c_boolean, c_bit, c_smallint, @@ -10371,17 +10317,8 @@ c_interval*A= c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8::TEXT AS c_macaddr8 + c_enum FROM postgres_types LIMIT 1GetPostgresTypes:one"G c_boolean0         Rpostgres_typesb @@ -10405,25 +10342,9 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
- -c_macaddr80         btext: query.sqlг -▐SELECT +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД +ьSELECT c_smallint, c_boolean, c_integer, @@ -10433,15 +10354,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_cidr, - c_inet, - c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY @@ -10454,15 +10367,7 @@ GROUP BY c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid LIMIT 1GetPostgresTypesCnt:one"I c_smallint0         Rpostgres_typesb @@ -10483,28 +10388,15 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql╤ -зSELECT +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sqlЇ +╩SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types -CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( +CROSS JOIN postgres_string_types +CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b @@ -10593,7 +10485,134 @@ LIMIT 1 GetPostgresStringTypesTextSearch:one"0 tsvectorztsv" rnk0         @breal*%! 
-to_tsquery0         btext: query.sql╨ +to_tsquery0         btext: query.sql╪ +Х +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5)InsertPostgresDateTimeTypes:exec*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval2 DateTime types : query.sqlBpostgres_datetime_types┴ +hSELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1GetPostgresDateTimeTypes:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval: query.sqlY +&TRUNCATE TABLE postgres_datetime_typesTruncatePostgresDateTimeTypes:exec: query.sqlш +ыSELECT + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval, + COUNT(*) AS cnt +FROM postgres_datetime_types +GROUP BY + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +LIMIT 1GetPostgresDateTimeTypesCnt:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb 
+pg_catalogintervalz +c_interval" +cnt0         @bbigint: query.sql╬ +ФINSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5) InsertPostgresDateTimeTypesBatch :copyfrom*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval: query.sqlBpostgres_datetime_typesЙ +Ф +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr, + c_macaddr8 +) VALUES ( + $1, + $2, + $3, + $4::macaddr8 +)InsertPostgresNetworkTypes:exec*KG +c_cidr0         8R publicpostgres_network_typesbcidrzc_cidr*KG +c_inet0         8R publicpostgres_network_typesbinetzc_inet*TP + c_macaddr0         8R publicpostgres_network_typesb macaddrz c_macaddr*'# + +c_macaddr80         b +macaddr82 Network types : query.sqlBpostgres_network_typesЙ +tSELECT + c_cidr, + c_inet, + c_macaddr, + c_macaddr8::TEXT AS c_macaddr8 +FROM postgres_network_types +LIMIT 1GetPostgresNetworkTypes:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr"! 
+ +c_macaddr80         btext: query.sqlW +%TRUNCATE TABLE postgres_network_typesTruncatePostgresNetworkTypes:exec: query.sqlк +ФSELECT + c_cidr, + c_inet, + c_macaddr, + COUNT(*) AS cnt +FROM postgres_network_types +GROUP BY + c_cidr, + c_inet, + c_macaddr +LIMIT 1GetPostgresNetworkTypesCnt:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr" +cnt0         @bbigint: query.sqlЭ +`INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr +) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE +c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE +c_inet0         R publicpostgres_network_typesbinetzc_inet*RN + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index 778db110..c81ee428 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -9,8 +9,10 @@ using System.Xml; namespace NpgsqlExampleGen; -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8, Guid? CUuid, CEnum? CEnum); +public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); public readonly record struct PostgresStringType(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? 
CText); +public readonly record struct PostgresDatetimeType(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); +public readonly record struct PostgresNetworkType(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); public readonly record struct PostgresUnstructuredType(JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride); public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index 31b7cb26..16b4c6b0 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -40,8 +40,8 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? 
CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -60,17 +60,8 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? 
(object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -94,23 +85,14 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid); public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -130,15 +112,7 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? 
(object)DBNull.Value, NpgsqlDbType.Interval); await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); } await writer.CompleteAsync(); @@ -148,8 +122,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, CEnum? CEnum, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -174,17 +148,8 @@ public async Task InsertPostgresTypesBatch(List ar CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? 
null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CUuid = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), - CEnum = reader.IsDBNull(16) ? null : reader.GetString(16).ToCEnum(), - CCidr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), - CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) + CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() }; } } @@ -216,17 +181,8 @@ public async Task InsertPostgresTypesBatch(List ar CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CUuid = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), - CEnum = reader.IsDBNull(16) ? null : reader.GetString(16).ToCEnum(), - CCidr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), - CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) + CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? 
null : reader.GetString(11).ToCEnum() }; } } @@ -235,8 +191,8 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; - public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, Guid? CUuid, NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, long Cnt); public async Task GetPostgresTypesCnt() { if (this.Transaction == null) @@ -260,16 +216,8 @@ public async Task InsertPostgresTypesBatch(List ar CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? 
null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CUuid = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CCidr = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), - CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), - CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - Cnt = reader.GetInt64(18) + CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) }; } } @@ -300,16 +248,8 @@ public async Task InsertPostgresTypesBatch(List ar CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? null : reader.GetFieldValue(13), - CUuid = reader.IsDBNull(14) ? null : reader.GetFieldValue(14), - CCidr = reader.IsDBNull(15) ? null : reader.GetFieldValue(15), - CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), - CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - Cnt = reader.GetInt64(18) + CUuid = reader.IsDBNull(9) ? 
null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) }; } } @@ -318,7 +258,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() { @@ -652,6 +592,398 @@ public async Task TruncatePostgresStringTypes() return null; } + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public readonly record struct InsertPostgresDateTimeTypesArgs(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(InsertPostgresDateTimeTypesSql)) + { + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public readonly record struct GetPostgresDateTimeTypesRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); + public async Task GetPostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesRow + { + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesRow + { + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY 
c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public readonly record struct GetPostgresDateTimeTypesCntRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, long Cnt); + public async Task GetPostgresDateTimeTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesCntRow + { + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresDateTimeTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesCntRow + { + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + + return null; + } + + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresDateTimeTypesBatchArgs(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); + public async Task InsertPostgresDateTimeTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public readonly record struct InsertPostgresNetworkTypesArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) + { + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresNetworkTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public readonly record struct GetPostgresNetworkTypesRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + public async Task GetPostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? 
null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNetworkTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresNetworkTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public readonly 
record struct GetPostgresNetworkTypesCntRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); + public async Task GetPostgresNetworkTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNetworkTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } + } + + return null; + } + + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresNetworkTypesBatchArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? 
CMacaddr); + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? 
CXmlStringOverride); public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index 574eca38..d9a4334f 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -140,177 +140,191 @@ } }, { - "name": "c_date", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "date" + "name": "uuid" } }, { - "name": "c_time", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "time" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_timestamp", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "bpchar" } }, { - "name": "c_timestamp_with_tz", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "varchar" } }, { - "name": "c_interval", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "varchar" } }, { - "name": "c_cidr", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "bpchar" } }, { - "name": "c_inet", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "postgres_datetime_types" + }, + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" } }, { - 
"name": "c_macaddr", + "name": "c_time", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "time" } }, { - "name": "c_macaddr8", + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "timestamp" } }, { - "name": "c_uuid", + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "timestamptz" } }, { - "name": "c_enum", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "interval" } } ] }, { "rel": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "columns": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - } - }, - { - "name": "c_varchar", + "name": "c_cidr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "cidr" } }, { - "name": "c_character_varying", + "name": "c_inet", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_bpchar", + "name": "c_macaddr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "bpchar" + "name": "macaddr" } }, { - "name": "c_text", + "name": "c_macaddr8", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr8" } } ] @@ -32561,7 
+32575,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32727,86 +32741,6 @@ }, { "number": 11, - "column": { - "name": "c_date", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": 
true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, "column": { "name": "c_uuid", "length": -1, @@ -32822,7 +32756,7 @@ } }, { - "number": 17, + "number": 12, "column": { "name": "c_enum", "length": -1, @@ -32830,58 +32764,6 @@ "name": "c_enum" } } - }, - { - "number": 18, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 19, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 20, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - "number": 21, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } } ], "comments": [ @@ -32893,7 +32775,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33034,81 +32916,6 @@ }, { "number": 10, - "column": { - "name": "c_date", - "length": -1, - "table": { - 
"schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 11, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 12, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 14, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 15, "column": { "name": "c_uuid", "length": -1, @@ -33121,51 +32928,6 @@ }, "originalName": "c_uuid" } - }, - { - "number": 16, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 17, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 18, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } } ], "filename": "query.sql", @@ -33174,7 +32936,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n 
c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33297,65 +33059,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33377,53 +33080,12 @@ "name": "c_enum" }, "originalName": "c_enum" - }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - 
"originalName": "c_macaddr" - }, - { - "name": "c_macaddr8", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33534,65 +33196,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - 
"schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33604,39 +33207,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, { "name": "cnt", "notNull": true, @@ -33650,7 +33220,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -34077,6 +33647,563 @@ ], "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + 
"schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "comments": [ + " DateTime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": 
"TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": 
"c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + 
"originalName": "c_macaddr" + } + }, + { + "number": 4, + "column": { + "name": "c_macaddr8", + "length": -1, + "type": { + "name": "macaddr8" + } + } + } + ], + "comments": [ + " Network types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "cnt", + 
"notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 8f4c0007..f8bd06f2 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb╤ 
examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunnerЎъ public"╕publicч +./dist/LocalRunner■ы public"└public▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -25,22 +25,6 @@ pg_catalogfloat4G c_double_precision0         Rpostgres_typesb pg_catalogfloat8/ c_money0         Rpostgres_typesbmoney- -c_date0         Rpostgres_typesbdate9 -c_time0         Rpostgres_typesb -pg_catalogtimeC - c_timestamp0         Rpostgres_typesb -pg_catalog timestampM -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzA - -c_interval0         Rpostgres_typesb -pg_cataloginterval- -c_cidr0         Rpostgres_typesbcidr- -c_inet0         Rpostgres_typesbinet3 - c_macaddr0         Rpostgres_typesb macaddr5 - -c_macaddr80         Rpostgres_typesb -macaddr8- c_uuid0         Rpostgres_typesbuuid/ c_enum0         Rpostgres_typesbc_enumч postgres_string_typesB @@ -51,7 +35,25 @@ pg_catalogvarcharP c_character_varying0         Rpostgres_string_typesb pg_catalogvarchar8 c_bpchar0         Rpostgres_string_typesbbpchar4 -c_text0         Rpostgres_string_typesbtextн +c_text0         Rpostgres_string_typesbtextЙ +postgres_datetime_types6 +c_date0         Rpostgres_datetime_typesbdateB +c_time0         Rpostgres_datetime_typesb +pg_catalogtimeL + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampV 
+c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzJ + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalД +postgres_network_types5 +c_cidr0         Rpostgres_network_typesbcidr5 +c_inet0         Rpostgres_network_typesbinet; + c_macaddr0         Rpostgres_network_typesb macaddr= + +c_macaddr80         Rpostgres_network_typesb +macaddr8н postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10219,8 +10221,9 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╫ -о +bio_type Autobiography BiographyMemoir╧ + +┴ INSERT INTO postgres_types ( c_boolean, @@ -10233,17 +10236,8 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8 + c_enum ) VALUES ( $1, @@ -10257,16 +10251,7 @@ VALUES ( $9, $10, $11, - $12, - $13, - $14, - $15, - $16, - $17::c_enum, - $18, - $19, - $20::macaddr, - $21::macaddr8 + $12::c_enum )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10281,22 +10266,9 @@ c_smallint*TP c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F B c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? -c_date0         8Rpublicpostgres_typesbdatezc_date*N J -c_time0         8Rpublicpostgres_typesbpg_catalog.timezc_time*] Y - c_timestamp0         8Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*ok -c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV - -c_interval0         8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*C? 
-c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# - -c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesю -сINSERT INTO postgres_types +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  +c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ +ШINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10307,15 +10279,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid ) VALUES ( $1, @@ -10327,15 +10291,7 @@ VALUES ( $7, $8, $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - $17, - $18 + $10 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10349,18 +10305,8 @@ c_smallint*RN c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ c_money0         Rpublicpostgres_typesbmoneyzc_money*A = -c_date0         Rpublicpostgres_typesbdatezc_date*L H -c_time0         Rpublicpostgres_typesbpg_catalog.timezc_time*[ W - c_timestamp0         Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*m i -c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT - -c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ -ЄSELECT +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ +╒SELECT c_boolean, c_bit, c_smallint, @@ -10371,17 +10317,8 @@ c_interval*A= c_decimal, c_double_precision, c_money, - c_date, - c_time, - 
c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8::TEXT AS c_macaddr8 + c_enum FROM postgres_types LIMIT 1GetPostgresTypes:one"G c_boolean0         Rpostgres_typesb @@ -10405,25 +10342,9 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! 
- -c_macaddr80         btext: query.sqlг -▐SELECT +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД +ьSELECT c_smallint, c_boolean, c_integer, @@ -10433,15 +10354,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_cidr, - c_inet, - c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY @@ -10454,15 +10367,7 @@ GROUP BY c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid LIMIT 1GetPostgresTypesCnt:one"I c_smallint0         Rpostgres_typesb @@ -10483,28 +10388,15 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql╤ -зSELECT +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sqlЇ +╩SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types -CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( +CROSS JOIN postgres_string_types +CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b @@ -10593,7 +10485,134 @@ LIMIT 1 GetPostgresStringTypesTextSearch:one"0 tsvectorztsv" rnk0         @breal*%! 
-to_tsquery0         btext: query.sql╨ +to_tsquery0         btext: query.sql╪ +Х +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5)InsertPostgresDateTimeTypes:exec*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval2 DateTime types : query.sqlBpostgres_datetime_types┴ +hSELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1GetPostgresDateTimeTypes:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval: query.sqlY +&TRUNCATE TABLE postgres_datetime_typesTruncatePostgresDateTimeTypes:exec: query.sqlш +ыSELECT + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval, + COUNT(*) AS cnt +FROM postgres_datetime_types +GROUP BY + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +LIMIT 1GetPostgresDateTimeTypesCnt:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb 
+pg_catalogintervalz +c_interval" +cnt0         @bbigint: query.sql╬ +ФINSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5) InsertPostgresDateTimeTypesBatch :copyfrom*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval: query.sqlBpostgres_datetime_typesЙ +Ф +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr, + c_macaddr8 +) VALUES ( + $1, + $2, + $3, + $4::macaddr8 +)InsertPostgresNetworkTypes:exec*KG +c_cidr0         8R publicpostgres_network_typesbcidrzc_cidr*KG +c_inet0         8R publicpostgres_network_typesbinetzc_inet*TP + c_macaddr0         8R publicpostgres_network_typesb macaddrz c_macaddr*'# + +c_macaddr80         b +macaddr82 Network types : query.sqlBpostgres_network_typesЙ +tSELECT + c_cidr, + c_inet, + c_macaddr, + c_macaddr8::TEXT AS c_macaddr8 +FROM postgres_network_types +LIMIT 1GetPostgresNetworkTypes:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr"! 
+ +c_macaddr80         btext: query.sqlW +%TRUNCATE TABLE postgres_network_typesTruncatePostgresNetworkTypes:exec: query.sqlк +ФSELECT + c_cidr, + c_inet, + c_macaddr, + COUNT(*) AS cnt +FROM postgres_network_types +GROUP BY + c_cidr, + c_inet, + c_macaddr +LIMIT 1GetPostgresNetworkTypesCnt:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr" +cnt0         @bbigint: query.sqlЭ +`INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr +) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE +c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE +c_inet0         R publicpostgres_network_typesbinetzc_inet*RN + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index a26cf9c9..7bb891b7 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -22,15 +22,6 @@ public class PostgresType public float? CReal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } }; @@ -42,6 +33,21 @@ public class PostgresStringType public string CBpchar { get; set; } public string CText { get; set; } }; + public class PostgresDatetimeType + { + public DateTime? CDate { get; set; } + public TimeSpan? 
CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + }; + public class PostgresNetworkType + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; public class PostgresUnstructuredType { public JsonElement? CJson { get; set; } diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index ddf48d48..deea00df 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -41,7 +41,7 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval, @c_uuid, @c_enum::c_enum, @c_cidr, @c_inet, @c_macaddr::macaddr, @c_macaddr8::macaddr8 )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { public bool? CBoolean { get; set; } @@ -54,17 +54,8 @@ public class InsertPostgresTypesArgs public decimal? CDecimal { get; set; } public double? 
CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } }; public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { @@ -84,17 +75,8 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -118,22 +100,13 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { public bool? CBoolean { get; set; } @@ -145,15 +118,7 @@ public class InsertPostgresTypesBatchArgs public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } }; public async Task InsertPostgresTypesBatch(List args) { @@ -174,15 +139,7 @@ public async Task InsertPostgresTypesBatch(List ar await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); await writer.WriteAsync(row.CUuid ?? 
(object)DBNull.Value); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); } await writer.CompleteAsync(); @@ -192,7 +149,7 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_enum, c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { public bool? CBoolean { get; set; } @@ -205,17 +162,8 @@ public class GetPostgresTypesRow public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } }; public async Task GetPostgresTypes() { @@ -241,17 +189,8 @@ public async Task GetPostgresTypes() CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? (DateTime? )null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? (TimeSpan? 
)null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CUuid = reader.IsDBNull(15) ? (Guid? )null : reader.GetFieldValue(15), - CEnum = reader.IsDBNull(16) ? (CEnum? )null : reader.GetString(16).ToCEnum(), - CCidr = reader.IsDBNull(17) ? (NpgsqlCidr? )null : reader.GetFieldValue(17), - CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), - CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CMacaddr8 = reader.IsDBNull(20) ? null : reader.GetString(20) + CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() }; } } @@ -283,17 +222,8 @@ public async Task GetPostgresTypes() CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDate = reader.IsDBNull(10) ? (DateTime? )null : reader.GetDateTime(10), - CTime = reader.IsDBNull(11) ? (TimeSpan? )null : reader.GetFieldValue(11), - CTimestamp = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CTimestampWithTz = reader.IsDBNull(13) ? (DateTime? )null : reader.GetDateTime(13), - CInterval = reader.IsDBNull(14) ? (TimeSpan? )null : reader.GetFieldValue(14), - CUuid = reader.IsDBNull(15) ? (Guid? )null : reader.GetFieldValue(15), - CEnum = reader.IsDBNull(16) ? (CEnum? )null : reader.GetString(16).ToCEnum(), - CCidr = reader.IsDBNull(17) ? (NpgsqlCidr? )null : reader.GetFieldValue(17), - CInet = reader.IsDBNull(18) ? null : reader.GetFieldValue(18), - CMacaddr = reader.IsDBNull(19) ? null : reader.GetFieldValue(19), - CMacaddr8 = reader.IsDBNull(20) ? 
null : reader.GetString(20) + CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() }; } } @@ -302,7 +232,7 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, c_uuid, c_cidr, c_inet, c_macaddr LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { public short? CSmallint { get; set; } @@ -314,15 +244,7 @@ public class GetPostgresTypesCntRow public decimal? CDecimal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } public Guid? CUuid { get; set; } - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } public long Cnt { get; set; } }; public async Task GetPostgresTypesCnt() @@ -348,16 +270,8 @@ public async Task GetPostgresTypesCnt() CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), CDoublePrecision = reader.IsDBNull(7) ? (double? 
)null : reader.GetDouble(7), CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CUuid = reader.IsDBNull(14) ? (Guid? )null : reader.GetFieldValue(14), - CCidr = reader.IsDBNull(15) ? (NpgsqlCidr? )null : reader.GetFieldValue(15), - CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), - CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - Cnt = reader.GetInt64(18) + CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) }; } } @@ -388,16 +302,8 @@ public async Task GetPostgresTypesCnt() CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CDate = reader.IsDBNull(9) ? (DateTime? )null : reader.GetDateTime(9), - CTime = reader.IsDBNull(10) ? (TimeSpan? )null : reader.GetFieldValue(10), - CTimestamp = reader.IsDBNull(11) ? (DateTime? )null : reader.GetDateTime(11), - CTimestampWithTz = reader.IsDBNull(12) ? (DateTime? )null : reader.GetDateTime(12), - CInterval = reader.IsDBNull(13) ? (TimeSpan? )null : reader.GetFieldValue(13), - CUuid = reader.IsDBNull(14) ? (Guid? )null : reader.GetFieldValue(14), - CCidr = reader.IsDBNull(15) ? (NpgsqlCidr? )null : reader.GetFieldValue(15), - CInet = reader.IsDBNull(16) ? null : reader.GetFieldValue(16), - CMacaddr = reader.IsDBNull(17) ? null : reader.GetFieldValue(17), - Cnt = reader.GetInt64(18) + CUuid = reader.IsDBNull(9) ? (Guid? 
)null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) }; } } @@ -406,7 +312,7 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -783,6 +689,450 @@ public async Task GetPostgresStringTypesTex return null; } + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(InsertPostgresDateTimeTypesSql)) + { + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + }; + public async Task GetPostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesRow + { + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? 
)null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesRow + { + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? 
)null : reader.GetFieldValue(4) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresDateTimeTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresDateTimeTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesCntRow + { + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? 
)null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresDateTimeTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresDateTimeTypesCntRow + { + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + + return null; + } + + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) + { + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresNetworkTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow + { + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } + }; + public async Task GetPostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? 
null : reader.GetString(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNetworkTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresNetworkTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow + { + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresNetworkTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNetworkTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } + } + + return null; + } + + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs + { + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + }; + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; public class InsertPostgresUnstructuredTypesArgs { diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index 35692c3f..cea3ae5f 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -140,177 +140,191 @@ } }, { - "name": "c_date", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, "type": { - "name": "date" + "name": "uuid" } }, { - "name": "c_time", + "name": "c_enum", "length": -1, "table": { "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "time" + "name": "c_enum" } - }, + } + ] + }, + { + "rel": { + "name": "postgres_string_types" + }, + "columns": [ { - "name": "c_timestamp", + "name": "c_char", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "bpchar" } }, { - "name": 
"c_timestamp_with_tz", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "varchar" } }, { - "name": "c_interval", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "varchar" } }, { - "name": "c_cidr", + "name": "c_bpchar", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "bpchar" } }, { - "name": "c_inet", + "name": "c_text", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "postgres_datetime_types" + }, + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" } }, { - "name": "c_macaddr", + "name": "c_time", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "time" } }, { - "name": "c_macaddr8", + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "macaddr8" + "schema": "pg_catalog", + "name": "timestamp" } }, { - "name": "c_uuid", + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "timestamptz" } }, { - "name": "c_enum", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_datetime_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "interval" } } ] }, { "rel": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, 
"columns": [ { - "name": "c_char", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bpchar" - } - }, - { - "name": "c_varchar", + "name": "c_cidr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "cidr" } }, { - "name": "c_character_varying", + "name": "c_inet", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "inet" } }, { - "name": "c_bpchar", + "name": "c_macaddr", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "bpchar" + "name": "macaddr" } }, { - "name": "c_text", + "name": "c_macaddr8", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr8" } } ] @@ -32561,7 +32575,7 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n $16,\n $17::c_enum,\n $18,\n $19,\n $20::macaddr,\n $21::macaddr8\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ @@ -32727,86 +32741,6 @@ }, { "number": 11, - "column": { - "name": "c_date", - "length": -1, 
- "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 12, - "column": { - "name": "c_time", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 14, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 15, - "column": { - "name": "c_interval", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 16, "column": { "name": "c_uuid", "length": -1, @@ -32822,7 +32756,7 @@ } }, { - "number": 17, + "number": 12, "column": { "name": "c_enum", "length": -1, @@ -32830,58 +32764,6 @@ "name": "c_enum" } } - }, - { - "number": 18, - "column": { - "name": "c_cidr", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 19, - "column": { - "name": "c_inet", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 20, - "column": { - "name": "c_macaddr", - "length": -1, - "type": { - "name": "macaddr" - } - } - }, - { - 
"number": 21, - "column": { - "name": "c_macaddr8", - "length": -1, - "type": { - "name": "macaddr8" - } - } } ], "comments": [ @@ -32893,7 +32775,7 @@ } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10, \n $11, \n $12, \n $13, \n $14, \n $15, \n $16, \n $17, \n $18\n)", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", "name": "InsertPostgresTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -33034,81 +32916,6 @@ }, { "number": 10, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 11, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 12, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 13, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 14, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": 
{ - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - }, - { - "number": 15, "column": { "name": "c_uuid", "length": -1, @@ -33121,51 +32928,6 @@ }, "originalName": "c_uuid" } - }, - { - "number": 16, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 17, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 18, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } } ], "filename": "query.sql", @@ -33174,7 +32936,7 @@ } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_enum,\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_types \nLIMIT 1", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", "name": "GetPostgresTypes", "cmd": ":one", "columns": [ @@ -33297,65 +33059,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", 
- "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33377,53 +33080,12 @@ "name": "c_enum" }, "originalName": "c_enum" - }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, - { - "name": "c_macaddr8", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n c_uuid,\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n 
c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ @@ -33534,65 +33196,6 @@ }, "originalName": "c_money" }, - { - "name": "c_date", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - }, - { - "name": "c_time", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "time" - }, - "originalName": "c_time" - }, - { - "name": "c_timestamp", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamp" - }, - "originalName": "c_timestamp" - }, - { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "timestamptz" - }, - "originalName": "c_timestamp_with_tz" - }, - { - "name": "c_interval", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "interval" - }, - "originalName": "c_interval" - }, { "name": "c_uuid", "length": -1, @@ -33604,39 +33207,6 @@ }, "originalName": "c_uuid" }, - { - "name": "c_cidr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - }, - { - "name": "c_inet", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - }, - { - "name": "c_macaddr", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - }, { "name": "cnt", "notNull": true, @@ -33650,7 +33220,7 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types", + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS 
max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", "name": "GetPostgresFunctions", "cmd": ":one", "columns": [ @@ -34077,6 +33647,563 @@ ], "filename": "query.sql" }, + { + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": "c_interval" + } + } + ], + "comments": [ + " DateTime types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", + "cmd": ":one", + 
"columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_date", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_time", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": 
"c_timestamp" + }, + { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, + { + "name": "c_interval", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "interval" + }, + "originalName": "c_interval" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_date", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + } + }, + { + "number": 2, + "column": { + "name": "c_time", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.time" + }, + "originalName": "c_time" + } + }, + { + "number": 3, + "column": { + "name": "c_timestamp", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.timestamptz" + }, + "originalName": "c_timestamp_with_tz" + } + }, + { + "number": 5, + "column": { + "name": "c_interval", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_datetime_types" + }, + "type": { + "name": "pg_catalog.interval" + }, + "originalName": 
"c_interval" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_datetime_types" + } + }, + { + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + } + }, + { + "number": 4, + "column": { + "name": "c_macaddr8", + "length": -1, + "type": { + "name": "macaddr8" + } + } + } + ], + "comments": [ + " Network types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + 
"originalName": "c_macaddr" + }, + { + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_cidr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + }, + { + "name": "c_inet", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + }, + { + "name": "c_macaddr", + "length": -1, + "table": { + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_cidr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "cidr" + }, + "originalName": "c_cidr" + } + }, + { + "number": 2, + "column": { + "name": "c_inet", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "inet" + }, + "originalName": "c_inet" + } + }, + { + "number": 3, + "column": { + "name": "c_macaddr", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_network_types" + }, + "type": { + "name": "macaddr" + }, + 
"originalName": "c_macaddr" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_network_types" + } + }, { "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", "name": "InsertPostgresUnstructuredTypes", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index 42a1d0e1..cef8e0ba 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunnerЎъ public"╕publicч +./dist/LocalRunner■ы public"└public▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -25,22 +25,6 @@ pg_catalogfloat4G c_double_precision0         Rpostgres_typesb pg_catalogfloat8/ c_money0         Rpostgres_typesbmoney- -c_date0         Rpostgres_typesbdate9 -c_time0         Rpostgres_typesb -pg_catalogtimeC - c_timestamp0         
Rpostgres_typesb -pg_catalog timestampM -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzA - -c_interval0         Rpostgres_typesb -pg_cataloginterval- -c_cidr0         Rpostgres_typesbcidr- -c_inet0         Rpostgres_typesbinet3 - c_macaddr0         Rpostgres_typesb macaddr5 - -c_macaddr80         Rpostgres_typesb -macaddr8- c_uuid0         Rpostgres_typesbuuid/ c_enum0         Rpostgres_typesbc_enumч postgres_string_typesB @@ -51,7 +35,25 @@ pg_catalogvarcharP c_character_varying0         Rpostgres_string_typesb pg_catalogvarchar8 c_bpchar0         Rpostgres_string_typesbbpchar4 -c_text0         Rpostgres_string_typesbtextн +c_text0         Rpostgres_string_typesbtextЙ +postgres_datetime_types6 +c_date0         Rpostgres_datetime_typesbdateB +c_time0         Rpostgres_datetime_typesb +pg_catalogtimeL + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampV +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzJ + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalД +postgres_network_types5 +c_cidr0         Rpostgres_network_typesbcidr5 +c_inet0         Rpostgres_network_typesbinet; + c_macaddr0         Rpostgres_network_typesb macaddr= + +c_macaddr80         Rpostgres_network_typesb +macaddr8н postgres_unstructured_types: c_json0         Rpostgres_unstructured_typesbjsonJ c_json_string_override0         Rpostgres_unstructured_typesbjson< @@ -10219,8 +10221,9 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╫ -о +bio_type Autobiography BiographyMemoir╧ + +┴ INSERT INTO postgres_types ( c_boolean, @@ -10233,17 +10236,8 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8 + c_enum ) VALUES ( $1, @@ -10257,16 +10251,7 @@ VALUES ( $9, 
$10, $11, - $12, - $13, - $14, - $15, - $16, - $17::c_enum, - $18, - $19, - $20::macaddr, - $21::macaddr8 + $12::c_enum )InsertPostgresTypes:exec*TP c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR @@ -10281,22 +10266,9 @@ c_smallint*TP c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F B c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? -c_date0         8Rpublicpostgres_typesbdatezc_date*N J -c_time0         8Rpublicpostgres_typesbpg_catalog.timezc_time*] Y - c_timestamp0         8Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*ok -c_timestamp_with_tz0         8Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*ZV - -c_interval0         8Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum*C? -c_cidr0         8Rpublicpostgres_typesbcidrzc_cidr*C? -c_inet0         8Rpublicpostgres_typesbinetzc_inet*%! - c_macaddr0         b macaddr*'# - -c_macaddr80         b -macaddr82 Basic types : query.sqlBpostgres_typesю -сINSERT INTO postgres_types +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  
+c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ +ШINSERT INTO postgres_types ( c_boolean, c_smallint, @@ -10307,15 +10279,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid ) VALUES ( $1, @@ -10327,15 +10291,7 @@ VALUES ( $7, $8, $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - $17, - $18 + $10 )InsertPostgresTypesBatch :copyfrom*RN c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP @@ -10349,18 +10305,8 @@ c_smallint*RN c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ c_money0         Rpublicpostgres_typesbmoneyzc_money*A = -c_date0         Rpublicpostgres_typesbdatezc_date*L H -c_time0         Rpublicpostgres_typesbpg_catalog.timezc_time*[ W - c_timestamp0         Rpublicpostgres_typesbpg_catalog.timestampz c_timestamp*m i -c_timestamp_with_tz0         Rpublicpostgres_typesbpg_catalog.timestamptzzc_timestamp_with_tz*XT - -c_interval0         Rpublicpostgres_typesbpg_catalog.intervalz -c_interval*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid*A= -c_cidr0         Rpublicpostgres_typesbcidrzc_cidr*A= -c_inet0         Rpublicpostgres_typesbinetzc_inet*JF - c_macaddr0         Rpublicpostgres_typesb macaddrz c_macaddr: query.sqlBpostgres_types░ -ЄSELECT +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ +╒SELECT c_boolean, c_bit, c_smallint, @@ -10371,17 +10317,8 @@ c_interval*A= c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8::TEXT AS c_macaddr8 + c_enum FROM postgres_types LIMIT 1GetPostgresTypes:one"G c_boolean0         Rpostgres_typesb @@ -10405,25 +10342,9 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         
Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         Rpostgres_typesb macaddrz c_macaddr"! - -c_macaddr80         btext: query.sqlг -▐SELECT +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД +ьSELECT c_smallint, c_boolean, c_integer, @@ -10433,15 +10354,7 @@ c_macaddr80 c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_cidr, - c_inet, - c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY @@ -10454,15 +10367,7 @@ GROUP BY c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid LIMIT 1GetPostgresTypesCnt:one"I c_smallint0         Rpostgres_typesb @@ -10483,28 +10388,15 @@ pg_catalognumericz c_decimal"[ c_double_precision0         Rpostgres_typesb pg_catalogfloat8zc_double_precision"8 c_money0         Rpostgres_typesbmoneyzc_money"5 -c_date0         Rpostgres_typesbdatezc_date"A -c_time0         Rpostgres_typesb -pg_catalogtimezc_time"P - c_timestamp0         Rpostgres_typesb -pg_catalog timestampz c_timestamp"b -c_timestamp_with_tz0         Rpostgres_typesb -pg_catalog timestamptzzc_timestamp_with_tz"M - -c_interval0         Rpostgres_typesb -pg_catalogintervalz -c_interval"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"5 -c_cidr0         Rpostgres_typesbcidrzc_cidr"5 -c_inet0         Rpostgres_typesbinetzc_inet"> - c_macaddr0         
Rpostgres_typesb macaddrz c_macaddr" -cnt0         @bbigint: query.sql╤ -зSELECT +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sqlЇ +╩SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types -CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( +CROSS JOIN postgres_string_types +CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( max_integer0         @b anyarray"( max_varchar0         @b @@ -10593,7 +10485,134 @@ LIMIT 1 GetPostgresStringTypesTextSearch:one"0 tsvectorztsv" rnk0         @breal*%! -to_tsquery0         btext: query.sql╨ +to_tsquery0         btext: query.sql╪ +Х +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5)InsertPostgresDateTimeTypes:exec*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval2 DateTime types : query.sqlBpostgres_datetime_types┴ +hSELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1GetPostgresDateTimeTypes:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval: query.sqlY +&TRUNCATE TABLE postgres_datetime_typesTruncatePostgresDateTimeTypes:exec: query.sqlш +ыSELECT + c_date, 
+ c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval, + COUNT(*) AS cnt +FROM postgres_datetime_types +GROUP BY + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +LIMIT 1GetPostgresDateTimeTypesCnt:one"> +c_date0         Rpostgres_datetime_typesbdatezc_date"J +c_time0         Rpostgres_datetime_typesb +pg_catalogtimezc_time"Y + c_timestamp0         Rpostgres_datetime_typesb +pg_catalog timestampz c_timestamp"k +c_timestamp_with_tz0         Rpostgres_datetime_typesb +pg_catalog timestamptzzc_timestamp_with_tz"V + +c_interval0         Rpostgres_datetime_typesb +pg_catalogintervalz +c_interval" +cnt0         @bbigint: query.sql╬ +ФINSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5) InsertPostgresDateTimeTypesBatch :copyfrom*JF +c_date0         R!publicpostgres_datetime_typesbdatezc_date*UQ +c_time0         R!publicpostgres_datetime_typesbpg_catalog.timezc_time*d` + c_timestamp0         R!publicpostgres_datetime_typesbpg_catalog.timestampz c_timestamp*vr +c_timestamp_with_tz0         R!publicpostgres_datetime_typesbpg_catalog.timestamptzzc_timestamp_with_tz*a] + +c_interval0         R!publicpostgres_datetime_typesbpg_catalog.intervalz +c_interval: query.sqlBpostgres_datetime_typesЙ +Ф +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr, + c_macaddr8 +) VALUES ( + $1, + $2, + $3, + $4::macaddr8 +)InsertPostgresNetworkTypes:exec*KG +c_cidr0         8R publicpostgres_network_typesbcidrzc_cidr*KG +c_inet0         8R publicpostgres_network_typesbinetzc_inet*TP + c_macaddr0         8R publicpostgres_network_typesb macaddrz c_macaddr*'# + +c_macaddr80         b +macaddr82 Network types : query.sqlBpostgres_network_typesЙ +tSELECT + c_cidr, + c_inet, + c_macaddr, + c_macaddr8::TEXT AS c_macaddr8 +FROM postgres_network_types +LIMIT 1GetPostgresNetworkTypes:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         
Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr"! + +c_macaddr80         btext: query.sqlW +%TRUNCATE TABLE postgres_network_typesTruncatePostgresNetworkTypes:exec: query.sqlк +ФSELECT + c_cidr, + c_inet, + c_macaddr, + COUNT(*) AS cnt +FROM postgres_network_types +GROUP BY + c_cidr, + c_inet, + c_macaddr +LIMIT 1GetPostgresNetworkTypesCnt:one"= +c_cidr0         Rpostgres_network_typesbcidrzc_cidr"= +c_inet0         Rpostgres_network_typesbinetzc_inet"F + c_macaddr0         Rpostgres_network_typesb macaddrz c_macaddr" +cnt0         @bbigint: query.sqlЭ +`INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr +) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE +c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE +c_inet0         R publicpostgres_network_typesbinetzc_inet*RN + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ ∙ INSERT INTO postgres_unstructured_types ( diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index 19550a1f..4a4fc751 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -13,17 +13,8 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8 + c_enum ) VALUES ( sqlc.narg('c_boolean'), @@ -36,17 +27,8 @@ VALUES ( sqlc.narg('c_decimal'), sqlc.narg('c_double_precision'), sqlc.narg('c_money'), - sqlc.narg('c_date'), - sqlc.narg('c_time'), - sqlc.narg('c_timestamp'), - sqlc.narg('c_timestamp_with_tz'), - sqlc.narg('c_interval'), sqlc.narg('c_uuid'), - sqlc.narg('c_enum')::c_enum, - sqlc.narg('c_cidr'), - sqlc.narg('c_inet'), - sqlc.narg('c_macaddr')::macaddr, - sqlc.narg('c_macaddr8')::macaddr8 + sqlc.narg('c_enum')::c_enum ); -- name: 
InsertPostgresTypesBatch :copyfrom @@ -61,15 +43,7 @@ INSERT INTO postgres_types c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid ) VALUES ( $1, @@ -81,15 +55,7 @@ VALUES ( $7, $8, $9, - $10, - $11, - $12, - $13, - $14, - $15, - $16, - $17, - $18 + $10 ); -- name: GetPostgresTypes :one @@ -104,17 +70,8 @@ SELECT c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_enum, - c_cidr, - c_inet, - c_macaddr, - c_macaddr8::TEXT AS c_macaddr8 + c_enum FROM postgres_types LIMIT 1; @@ -129,15 +86,7 @@ SELECT c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, c_uuid, - c_cidr, - c_inet, - c_macaddr, COUNT(*) AS cnt FROM postgres_types GROUP BY @@ -150,15 +99,7 @@ GROUP BY c_decimal, c_double_precision, c_money, - c_date, - c_time, - c_timestamp, - c_timestamp_with_tz, - c_interval, - c_uuid, - c_cidr, - c_inet, - c_macaddr + c_uuid LIMIT 1; -- name: GetPostgresFunctions :one @@ -167,7 +108,8 @@ SELECT MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types -CROSS JOIN postgres_string_types; +CROSS JOIN postgres_string_types +CROSS JOIN postgres_datetime_types; -- name: TruncatePostgresTypes :exec TRUNCATE TABLE postgres_types; @@ -233,6 +175,100 @@ FROM txt_query ORDER BY rnk DESC LIMIT 1; +/* DateTime types */ + +-- name: InsertPostgresDateTimeTypes :exec +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5); + +-- name: GetPostgresDateTimeTypes :one +SELECT * FROM postgres_datetime_types LIMIT 1; + +-- name: TruncatePostgresDateTimeTypes :exec +TRUNCATE TABLE postgres_datetime_types; + +-- name: GetPostgresDateTimeTypesCnt :one +SELECT + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval, + COUNT(*) 
AS cnt +FROM postgres_datetime_types +GROUP BY + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +LIMIT 1; + +-- name: InsertPostgresDateTimeTypesBatch :copyfrom +INSERT INTO postgres_datetime_types +( + c_date, + c_time, + c_timestamp, + c_timestamp_with_tz, + c_interval +) VALUES ($1, $2, $3, $4, $5); + +/* Network types */ + +-- name: InsertPostgresNetworkTypes :exec +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr, + c_macaddr8 +) VALUES ( + sqlc.narg('c_cidr'), + sqlc.narg('c_inet'), + sqlc.narg('c_macaddr'), + sqlc.narg('c_macaddr8')::macaddr8 +); + +-- name: GetPostgresNetworkTypes :one +SELECT + c_cidr, + c_inet, + c_macaddr, + c_macaddr8::TEXT AS c_macaddr8 +FROM postgres_network_types +LIMIT 1; + +-- name: TruncatePostgresNetworkTypes :exec +TRUNCATE TABLE postgres_network_types; + +-- name: GetPostgresNetworkTypesCnt :one +SELECT + c_cidr, + c_inet, + c_macaddr, + COUNT(*) AS cnt +FROM postgres_network_types +GROUP BY + c_cidr, + c_inet, + c_macaddr +LIMIT 1; + +-- name: InsertPostgresNetworkTypesBatch :copyfrom +INSERT INTO postgres_network_types +( + c_cidr, + c_inet, + c_macaddr +) VALUES ($1, $2, $3); + /* Unstructured types */ -- name: InsertPostgresUnstructuredTypes :exec diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index 01881e1c..fa03b367 100644 --- a/examples/config/postgresql/types/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -15,19 +15,6 @@ CREATE TABLE postgres_types ( c_double_precision DOUBLE PRECISION, c_money MONEY, - /* Date and Time Data Types */ - c_date DATE, - c_time TIME, - c_timestamp TIMESTAMP, - c_timestamp_with_tz TIMESTAMP WITH TIME ZONE, - c_interval INTERVAL, - - /* Network Address Data Types */ - c_cidr CIDR, - c_inet INET, - c_macaddr MACADDR, - c_macaddr8 MACADDR8, - /* Special Data Types */ c_uuid UUID, c_enum c_enum @@ -41,6 +28,21 @@ CREATE TABLE postgres_string_types ( c_text TEXT ); +CREATE TABLE 
postgres_datetime_types ( + c_date DATE, + c_time TIME, + c_timestamp TIMESTAMP, + c_timestamp_with_tz TIMESTAMP WITH TIME ZONE, + c_interval INTERVAL +); + +CREATE TABLE postgres_network_types ( + c_cidr CIDR, + c_inet INET, + c_macaddr MACADDR, + c_macaddr8 MACADDR8 +); + CREATE EXTENSION "pg_trgm"; CREATE EXTENSION "btree_gin"; From e74a768e1886d27460c27206489cf84573aad372 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Fri, 22 Aug 2025 22:50:54 +0200 Subject: [PATCH 28/33] fix: move authors schema to be first in config --- .../MySqlConnectorDapperExample/Models.cs | 178 +- .../MySqlConnectorDapperExample/request.json | 210 +- .../request.message | Bin 25750 -> 25750 bytes .../Models.cs | 178 +- .../QuerySql.cs | 1520 +++--- .../Utils.cs | 22 +- .../request.json | 4320 ++++++++--------- .../request.message | Bin 25784 -> 25784 bytes examples/MySqlConnectorExample/Models.cs | 156 +- examples/MySqlConnectorExample/request.json | 210 +- .../MySqlConnectorExample/request.message | Bin 25734 -> 25734 bytes .../MySqlConnectorLegacyExample/Models.cs | 178 +- .../MySqlConnectorLegacyExample/QuerySql.cs | 2402 ++++----- .../MySqlConnectorLegacyExample/request.json | 4320 ++++++++--------- .../request.message | Bin 25768 -> 25768 bytes examples/NpgsqlDapperExample/Models.cs | 26 +- examples/NpgsqlDapperExample/QuerySql.cs | 1566 +++--- examples/NpgsqlDapperExample/request.json | 3980 +++++++-------- examples/NpgsqlDapperExample/request.message | 236 +- examples/NpgsqlDapperLegacyExample/Models.cs | 26 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 1566 +++--- .../NpgsqlDapperLegacyExample/request.json | 3980 +++++++-------- .../NpgsqlDapperLegacyExample/request.message | 236 +- examples/NpgsqlExample/Models.cs | 4 +- examples/NpgsqlExample/QuerySql.cs | 2026 ++++---- examples/NpgsqlExample/request.json | 3980 +++++++-------- examples/NpgsqlExample/request.message | 236 +- examples/NpgsqlLegacyExample/Models.cs | 26 +- examples/NpgsqlLegacyExample/QuerySql.cs | 2548 
+++++----- examples/NpgsqlLegacyExample/request.json | 3980 +++++++-------- examples/NpgsqlLegacyExample/request.message | 236 +- examples/SqliteDapperExample/Models.cs | 14 +- examples/SqliteDapperExample/QuerySql.cs | 282 +- examples/SqliteDapperExample/request.json | 1130 ++--- examples/SqliteDapperExample/request.message | Bin 7906 -> 7906 bytes examples/SqliteDapperLegacyExample/Models.cs | 14 +- .../SqliteDapperLegacyExample/QuerySql.cs | 282 +- .../SqliteDapperLegacyExample/request.json | 1130 ++--- .../SqliteDapperLegacyExample/request.message | Bin 7940 -> 7940 bytes examples/SqliteExample/Models.cs | 4 +- examples/SqliteExample/QuerySql.cs | 494 +- examples/SqliteExample/request.json | 1130 ++--- examples/SqliteExample/request.message | Bin 7890 -> 7890 bytes examples/SqliteLegacyExample/Models.cs | 14 +- examples/SqliteLegacyExample/QuerySql.cs | 742 +-- examples/SqliteLegacyExample/request.json | 1130 ++--- examples/SqliteLegacyExample/request.message | Bin 7924 -> 7924 bytes sqlc.ci.yaml | 44 +- sqlc.local.generated.yaml | 44 +- sqlc.request.generated.yaml | 44 +- 50 files changed, 22422 insertions(+), 22422 deletions(-) diff --git a/examples/MySqlConnectorDapperExample/Models.cs b/examples/MySqlConnectorDapperExample/Models.cs index e3abdf1c..84b866b4 100644 --- a/examples/MySqlConnectorDapperExample/Models.cs +++ b/examples/MySqlConnectorDapperExample/Models.cs @@ -5,6 +5,19 @@ using System.Text.Json; namespace MySqlConnectorDapperExampleGen; +public class Author +{ + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } +}; +public class Book +{ + public required long Id { get; init; } + public required string Name { get; init; } + public required long AuthorId { get; init; } + public string? Description { get; init; } +}; public class MysqlNumericType { public bool? CBool { get; init; } @@ -56,19 +69,6 @@ public class MysqlBinaryType public byte[]? 
CMediumblob { get; init; } public byte[]? CLongblob { get; init; } }; -public class Author -{ - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } -}; -public class Book -{ - public required long Id { get; init; } - public required string Name { get; init; } - public required long AuthorId { get; init; } - public string? Description { get; init; } -}; public class ExtendedBio { public string? AuthorName { get; init; } @@ -76,162 +76,162 @@ public class ExtendedBio public BiosBioType? BioType { get; init; } public HashSet? AuthorType { get; init; } }; -public enum MysqlStringTypesCEnum +public enum BiosBioType { Invalid = 0, // reserved for invalid enum value - Small = 1, - Medium = 2, - Big = 3 + Autobiography = 1, + Biography = 2, + Memoir = 3 } -public static class MysqlStringTypesCEnumExtensions +public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCEnum.Invalid, - ["small"] = MysqlStringTypesCEnum.Small, - ["medium"] = MysqlStringTypesCEnum.Medium, - ["big"] = MysqlStringTypesCEnum.Big + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCEnum.Invalid] = string.Empty, - [MysqlStringTypesCEnum.Small] = "small", - [MysqlStringTypesCEnum.Medium] = "medium", - [MysqlStringTypesCEnum.Big] = "big" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) + public static BiosBioType 
ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCEnum me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCEnumSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum MysqlStringTypesCSet +public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value - Tea = 1, - Coffee = 2, - Milk = 3 + Author = 1, + Editor = 2, + Translator = 3 } -public static class MysqlStringTypesCSetExtensions +public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCSet.Invalid, - ["tea"] = MysqlStringTypesCSet.Tea, - ["coffee"] = MysqlStringTypesCSet.Coffee, - ["milk"] = MysqlStringTypesCSet.Milk + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCSet.Invalid] = string.Empty, - [MysqlStringTypesCSet.Tea] = "tea", - [MysqlStringTypesCSet.Coffee] = "coffee", - [MysqlStringTypesCSet.Milk] = "milk" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCSet me) + public static string 
Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCSetSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum BiosBioType +public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value - Autobiography = 1, - Biography = 2, - Memoir = 3 + Small = 1, + Medium = 2, + Big = 3 } -public static class BiosBioTypeExtensions +public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosBioType.Invalid, - ["Autobiography"] = BiosBioType.Autobiography, - ["Biography"] = BiosBioType.Biography, - ["Memoir"] = BiosBioType.Memoir + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosBioType.Invalid] = string.Empty, - [BiosBioType.Autobiography] = "Autobiography", - [BiosBioType.Biography] = "Biography", - [BiosBioType.Memoir] = "Memoir" + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" }; - public static BiosBioType ToBiosBioType(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosBioType me) + public static string Stringify(this MysqlStringTypesCEnum me) { return EnumToString[me]; } - public static HashSet ToBiosBioTypeSet(this string me) + public static 
HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum BiosAuthorType +public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value - Author = 1, - Editor = 2, - Translator = 3 + Tea = 1, + Coffee = 2, + Milk = 3 } -public static class BiosAuthorTypeExtensions +public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosAuthorType.Invalid, - ["Author"] = BiosAuthorType.Author, - ["Editor"] = BiosAuthorType.Editor, - ["Translator"] = BiosAuthorType.Translator + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosAuthorType.Invalid] = string.Empty, - [BiosAuthorType.Author] = "Author", - [BiosAuthorType.Editor] = "Editor", - [BiosAuthorType.Translator] = "Translator" + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" }; - public static BiosAuthorType ToBiosAuthorType(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosAuthorType me) + public static string Stringify(this MysqlStringTypesCSet me) { return EnumToString[me]; } - public static HashSet ToBiosAuthorTypeSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new 
HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperExample/request.json b/examples/MySqlConnectorDapperExample/request.json index f8f7e330..23d0fc09 100644 --- a/examples/MySqlConnectorDapperExample/request.json +++ b/examples/MySqlConnectorDapperExample/request.json @@ -3,8 +3,8 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/types/schema.sql", - "examples/config/mysql/authors/schema.sql" + "examples/config/mysql/authors/schema.sql", + "examples/config/mysql/types/schema.sql" ], "queries": [ "examples/config/mysql/authors/query.sql", @@ -25,6 +25,95 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "mysql_numeric_types" @@ -442,98 +531,25 @@ } } ] - }, + } + ], + "enums": [ { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" 
- } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } + "name": "bios_bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" ] }, { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } + "name": "bios_author_type", + "vals": [ + "Author", + "Editor", + "Translator" ] - } - ], - "enums": [ + }, { "name": "mysql_string_types_c_enum", "vals": [ @@ -549,22 +565,6 @@ "coffee", "milk" ] - }, - { - "name": "bios_bio_type", - "vals": [ - "Autobiography", - "Biography", - "Memoir" - ] - }, - { - "name": "bios_author_type", - "vals": [ - "Author", - "Editor", - "Translator" - ] } ] }, diff --git a/examples/MySqlConnectorDapperExample/request.message b/examples/MySqlConnectorDapperExample/request.message index 1864e1375ba0f3cc3cf3454948d4fdaa753a0b4d..29881c40817f26bfc04c3ab189c1716ee92d1d64 100644 GIT binary patch delta 90 zcmbPsl5yHeMt-jMtXzynLae!!#f3Ri8WR)cc#4xVQgajaKzy}{iDDBMuuatF+8Doz uY4dV!BSsdf^=w>|Co+p|R$^}9m~6?L!lchNS AuthorType { get; set; } }; - public enum MysqlStringTypesCEnum + public enum BiosBioType { Invalid = 0, // reserved for invalid enum value - Small = 1, - Medium = 2, - Big = 3 + Autobiography = 1, + Biography = 2, + Memoir = 3 } - public static class MysqlStringTypesCEnumExtensions + public static class BiosBioTypeExtensions { - private 
static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCEnum.Invalid, - ["small"] = MysqlStringTypesCEnum.Small, - ["medium"] = MysqlStringTypesCEnum.Medium, - ["big"] = MysqlStringTypesCEnum.Big + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCEnum.Invalid] = string.Empty, - [MysqlStringTypesCEnum.Small] = "small", - [MysqlStringTypesCEnum.Medium] = "medium", - [MysqlStringTypesCEnum.Big] = "big" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCEnum me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCEnumSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum MysqlStringTypesCSet + public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value - Tea = 1, - Coffee = 2, - Milk = 3 + Author = 1, + Editor = 2, + Translator = 3 } - public static class MysqlStringTypesCSetExtensions + public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - 
[string.Empty] = MysqlStringTypesCSet.Invalid, - ["tea"] = MysqlStringTypesCSet.Tea, - ["coffee"] = MysqlStringTypesCSet.Coffee, - ["milk"] = MysqlStringTypesCSet.Milk + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCSet.Invalid] = string.Empty, - [MysqlStringTypesCSet.Tea] = "tea", - [MysqlStringTypesCSet.Coffee] = "coffee", - [MysqlStringTypesCSet.Milk] = "milk" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCSet me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCSetSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum BiosBioType + public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value - Autobiography = 1, - Biography = 2, - Memoir = 3 + Small = 1, + Medium = 2, + Big = 3 } - public static class BiosBioTypeExtensions + public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosBioType.Invalid, - ["Autobiography"] = BiosBioType.Autobiography, - ["Biography"] = BiosBioType.Biography, - ["Memoir"] = 
BiosBioType.Memoir + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosBioType.Invalid] = string.Empty, - [BiosBioType.Autobiography] = "Autobiography", - [BiosBioType.Biography] = "Biography", - [BiosBioType.Memoir] = "Memoir" + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" }; - public static BiosBioType ToBiosBioType(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosBioType me) + public static string Stringify(this MysqlStringTypesCEnum me) { return EnumToString[me]; } - public static HashSet ToBiosBioTypeSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum BiosAuthorType + public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value - Author = 1, - Editor = 2, - Translator = 3 + Tea = 1, + Coffee = 2, + Milk = 3 } - public static class BiosAuthorTypeExtensions + public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosAuthorType.Invalid, - ["Author"] = BiosAuthorType.Author, - ["Editor"] = BiosAuthorType.Editor, - ["Translator"] = BiosAuthorType.Translator + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = 
MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosAuthorType.Invalid] = string.Empty, - [BiosAuthorType.Author] = "Author", - [BiosAuthorType.Editor] = "Editor", - [BiosAuthorType.Translator] = "Translator" + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" }; - public static BiosAuthorType ToBiosAuthorType(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosAuthorType me) + public static string Stringify(this MysqlStringTypesCSet me) { return EnumToString[me]; } - public static HashSet ToBiosAuthorTypeSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs index 45d502b5..d196b9ff 100644 --- a/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorDapperLegacyExample/QuerySql.cs @@ -45,590 +45,591 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, 
@c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; - public class InsertMysqlNumericTypesArgs + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; + public class GetAuthorRow { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CFloat { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) + public class GetAuthorArgs + { + public string Name { get; set; } + }; + public async Task GetAuthor(GetAuthorArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bool", args.CBool); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_tinyint", args.CTinyint); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_mediumint", args.CMediumint); - queryParams.Add("c_int", args.CInt); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_dec", args.CDec); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_fixed", args.CFixed); - queryParams.Add("c_float", args.CFloat); - queryParams.Add("c_double", args.CDouble); - queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) 
- await connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - public class InsertMysqlNumericTypesBatchArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public class ListAuthorsRow { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlNumericTypesBatch(List args) + public class ListAuthorsArgs { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + public int Limit { get; set; } + public int Offset { get; set; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("limit", args.Limit); + queryParams.Add("offset", args.Offset); + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] - { - supportedDateTimeFormat - } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + var result = await connection.QueryAsync(ListAuthorsSql, queryParams); + return result.AsList(); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); + } + + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; + public class CreateAuthorArgs + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); + if (this.Transaction == null) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) - { - Local = true, - TableName = "mysql_numeric_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(CreateAuthorSql, queryParams); + return; } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, 
c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; - public class GetMysqlNumericTypesRow + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio); SELECT LAST_INSERT_ID()"; + public class CreateAuthorReturnIdArgs { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlNumericTypes() + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql); - return result; - } + return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string 
GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; - public class GetMysqlNumericTypesCntRow + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow { - public long Cnt { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlNumericTypesCnt() + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; - public async Task TruncateMysqlNumericTypes() + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public class GetAuthorByNamePatternRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorByNamePatternArgs { + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlNumericTypesSql); - return; 
+ { + var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); + return result.AsList(); + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlNumericTypesSql, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public class InsertMysqlStringTypesArgs + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } + public string Name { get; set; } }; - public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + public async Task DeleteAuthor(DeleteAuthorArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_nchar", args.CNchar); - queryParams.Add("c_national_char", args.CNationalChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_tinytext", args.CTinytext); - queryParams.Add("c_mediumtext", args.CMediumtext); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_longtext", args.CLongtext); - queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_enum", args.CEnum); - queryParams.Add("c_set", args.CSet != null ? string.Join(",", args.CSet) : null); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams); + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } - public class InsertMysqlStringTypesBatchArgs + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string 
CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(DeleteAllAuthorsSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); + } + + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public class UpdateAuthorsArgs + { + public string Bio { get; set; } }; - public async Task InsertMysqlStringTypesBatch(List args) + public async Task UpdateAuthors(UpdateAuthorsArgs args) { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + var queryParams = new Dictionary(); + queryParams.Add("bio", args.Bio); + if (this.Transaction == null) { - var options = new TypeConverterOptions - { - Formats = new[] - { - supportedDateTimeFormat - } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - 
csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + using (var connection = new MySqlConnection(ConnectionString)) + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + } + + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; + public class GetAuthorsByIdsRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsArgs + { + public long[] Ids { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + { + var transformedSql = GetAuthorsByIdsSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + var queryParams = new Dictionary(); + for (int i = 0; i < args.Ids.Length; i++) + queryParams.Add($"@idsArg{i}", args.Ids[i]); + if (this.Transaction == null) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) + using (var connection = new MySqlConnection(ConnectionString)) { - Local = true, - TableName = "mysql_string_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); - 
await loader.LoadAsync(); - await connection.CloseAsync(); + var result = await connection.QueryAsync(transformedSql, queryParams); + return result.AsList(); + } } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; - public class GetMysqlStringTypesRow + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; + public class GetAuthorsByIdsAndNamesRow { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlStringTypes() + public class GetAuthorsByIdsAndNamesArgs + { + public long[] Ids { get; set; } + public string[] Names { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { + var transformedSql = GetAuthorsByIdsAndNamesSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); + var queryParams = new Dictionary(); + for (int i = 0; i < args.Ids.Length; i++) + queryParams.Add($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + queryParams.Add($"@namesArg{i}", args.Names[i]); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql); - return result; + var result = await connection.QueryAsync(transformedSql, queryParams); + return result.AsList(); } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, 
c_json_string_override, c_enum, c_set LIMIT 1"; - public class GetMysqlStringTypesCntRow + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id); SELECT LAST_INSERT_ID()"; + public class CreateBookArgs { - public long Cnt { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + public string Name { get; set; } + public long AuthorId { get; set; } }; - public async Task GetMysqlStringTypesCnt() + public async Task CreateBook(CreateBookArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql); - return result; - } + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; - public async Task TruncateMysqlStringTypes() + private const string ListAllAuthorsBooksSql = 
"SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow + { + public Author Author { get; set; } + public Book Book { get; set; } + }; + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlStringTypesSql); - return; + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlStringTypesSql, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; - public class InsertMysqlDatetimeTypesArgs + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) + public async Task> GetDuplicateAuthors() { - var queryParams = new Dictionary(); - queryParams.Add("c_year", args.CYear); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_datetime", args.CDatetime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_time", args.CTime); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams); - return; + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } } - public class InsertMysqlDatetimeTypesBatchArgs + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? 
CTime { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } }; - public async Task InsertMysqlDatetimeTypesBatch(List args) + public class GetAuthorsByBookNameArgs { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + { + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) { - supportedDateTimeFormat + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) { - Local = true, - TableName = "mysql_datetime_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; - public class GetMysqlDatetimeTypesRow + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; + public class CreateExtendedBioArgs { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; - public async Task GetMysqlDatetimeTypes() + public async Task CreateExtendedBio(CreateExtendedBioArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType); + queryParams.Add("author_type", args.AuthorType != null ? 
string.Join(",", args.AuthorType) : null); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql); - return result; - } + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } - private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; - public class GetMysqlDatetimeTypesCntRow + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow { - public long Cnt { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; - public async Task GetMysqlDatetimeTypesCnt() + public class GetFirstExtendedBioByTypeArgs + { + public BiosBioType? 
BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); } - private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; - public async Task TruncateMysqlDatetimeTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql); + await connection.ExecuteAsync(TruncateExtendedBiosSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, 
c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; - public class InsertMysqlBinaryTypesArgs + private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public class InsertMysqlNumericTypesArgs { - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CFloat { get; set; } + public double? CDouble { get; set; } + public double? 
CDoublePrecision { get; set; } }; - public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_binary", args.CBinary); - queryParams.Add("c_varbinary", args.CVarbinary); - queryParams.Add("c_tinyblob", args.CTinyblob); - queryParams.Add("c_blob", args.CBlob); - queryParams.Add("c_mediumblob", args.CMediumblob); - queryParams.Add("c_longblob", args.CLongblob); + queryParams.Add("c_bool", args.CBool); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_tinyint", args.CTinyint); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_mediumint", args.CMediumint); + queryParams.Add("c_int", args.CInt); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_dec", args.CDec); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_fixed", args.CFixed); + queryParams.Add("c_float", args.CFloat); + queryParams.Add("c_double", args.CDouble); + queryParams.Add("c_double_precision", args.CDoublePrecision); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); + await connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlNumericTypesSql, queryParams, transaction: this.Transaction); } - public class InsertMysqlBinaryTypesBatchArgs 
+ public class InsertMysqlNumericTypesBatchArgs { - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } }; - public async Task InsertMysqlBinaryTypesBatch(List args) + public async Task InsertMysqlNumericTypesBatch(List args) { const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; var config = new CsvConfiguration(CultureInfo.CurrentCulture) @@ -649,9 +650,13 @@ public async Task InsertMysqlBinaryTypesBatch(List(options); csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + 
csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); await csvWriter.WriteRecordsAsync(args); } @@ -661,7 +666,7 @@ public async Task InsertMysqlBinaryTypesBatch(List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); await loader.LoadAsync(); await connection.CloseAsync(); } } - private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; - public class GetMysqlBinaryTypesRow + private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public class GetMysqlNumericTypesRow { - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? 
CDoublePrecision { get; set; } }; - public async Task GetMysqlBinaryTypes() + public async Task GetMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesSql, transaction: this.Transaction); } - private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; - public class GetMysqlBinaryTypesCntRow + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + public class GetMysqlNumericTypesCntRow { public long Cnt { get; set; } - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public bool? 
CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } }; - public async Task GetMysqlBinaryTypesCnt() + public async Task GetMysqlNumericTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlNumericTypesCntSql, transaction: this.Transaction); } - private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; - public async Task TruncateMysqlBinaryTypes() + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); + await connection.ExecuteAsync(TruncateMysqlNumericTypesSql); return; } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); - } - - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; - public class GetMysqlFunctionsRow - { - public int? MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } - }; - public async Task GetMysqlFunctions() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlNumericTypesSql, transaction: this.Transaction); } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorArgs + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, 
@c_json_string_override, @c_enum, @c_set)"; + public class InsertMysqlStringTypesArgs { - public string Name { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_nchar", args.CNchar); + queryParams.Add("c_national_char", args.CNationalChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_tinytext", args.CTinytext); + queryParams.Add("c_mediumtext", args.CMediumtext); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_longtext", args.CLongtext); + queryParams.Add("c_json", args.CJson?.GetRawText() ?? null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_enum", args.CEnum); + queryParams.Add("c_set", args.CSet != null ? 
string.Join(",", args.CSet) : null); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); - return result; - } + await connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlStringTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs + public class InsertMysqlStringTypesBatchArgs { - public int Limit { get; set; } - public int Offset { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? 
CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task> ListAuthors(ListAuthorsArgs args) + public async Task InsertMysqlStringTypesBatch(List args) { - var queryParams = new Dictionary(); - queryParams.Add("limit", args.Limit); - queryParams.Add("offset", args.Offset); - if (this.Transaction == null) + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) { - using (var connection = new MySqlConnection(ConnectionString)) + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions { - var result = await connection.QueryAsync(ListAuthorsSql, queryParams); - return result.AsList(); - } + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); - } - - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; - public class CreateAuthorArgs - { - public long Id { get; set; } - public string Name 
{ get; set; } - public string Bio { get; set; } - }; - public async Task CreateAuthor(CreateAuthorArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); - if (this.Transaction == null) + using (var connection = new MySqlConnection(ConnectionString)) { - using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(CreateAuthorSql, queryParams); - return; + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio); SELECT LAST_INSERT_ID()"; - public class CreateAuthorReturnIdArgs + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; + public class GetMysqlStringTypesRow { - public string Name { get; set; } - public string Bio { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string 
CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task GetMysqlStringTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesSql, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, 
c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public class GetMysqlStringTypesCntRow { - public long Id { get; set; } + public long Cnt { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetMysqlStringTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlStringTypesCntSql, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs - { - public string NamePattern { get; set; } - }; - public async Task> 
GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncateMysqlStringTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlStringTypesSql, transaction: this.Transaction); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; + public class InsertMysqlDatetimeTypesArgs { - public string Name { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); + queryParams.Add("c_year", args.CYear); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_datetime", args.CDatetime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_time", args.CTime); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); + await connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlDatetimeTypesSql, queryParams, transaction: this.Transaction); } - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() + public class InsertMysqlDatetimeTypesBatchArgs { - if (this.Transaction == null) + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } + }; + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) { - using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAllAuthorsSql); - return; + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } } - private const string UpdateAuthorsSql = "UPDATE 
authors SET bio = @bio WHERE bio IS NOT NULL"; - public class UpdateAuthorsArgs + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; + public class GetMysqlDatetimeTypesRow { - public string Bio { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + public async Task GetMysqlDatetimeTypes() { - var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); + { + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; + public class GetMysqlDatetimeTypesCntRow { - public long[] Ids { get; set; 
} + public long Cnt { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetMysqlDatetimeTypesCnt() { - var transformedSql = GetAuthorsByIdsSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - var queryParams = new Dictionary(); - for (int i = 0; i < args.Ids.Length; i++) - queryParams.Add($"@idsArg{i}", args.Ids[i]); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(transformedSql, queryParams); - return result.AsList(); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlDatetimeTypesCntSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public long[] Ids { get; set; } - public string[] Names { get; set; } - }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public 
async Task TruncateMysqlDatetimeTypes() { - var transformedSql = GetAuthorsByIdsAndNamesSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); - var queryParams = new Dictionary(); - for (int i = 0; i < args.Ids.Length; i++) - queryParams.Add($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - queryParams.Add($"@namesArg{i}", args.Names[i]); - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(transformedSql, queryParams); - return result.AsList(); - } + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(transformedSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlDatetimeTypesSql, transaction: this.Transaction); } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id); SELECT LAST_INSERT_ID()"; - public class CreateBookArgs + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; + public class InsertMysqlBinaryTypesArgs { - public string Name { get; set; } - public long AuthorId { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_binary", args.CBinary); + queryParams.Add("c_varbinary", args.CVarbinary); + queryParams.Add("c_tinyblob", args.CTinyblob); + queryParams.Add("c_blob", args.CBlob); + queryParams.Add("c_mediumblob", args.CMediumblob); + queryParams.Add("c_longblob", args.CLongblob); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateBookSql, queryParams); + await connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertMysqlBinaryTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + public class InsertMysqlBinaryTypesBatchArgs { - public Author Author { get; set; } - public Book Book { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task InsertMysqlBinaryTypesBatch(List args) { - if (this.Transaction == null) + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) { - using (var connection = new MySqlConnection(ConnectionString)) + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions { - await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) + Formats = new[] { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + supportedDateTimeFormat } - } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + using (var connection = new MySqlConnection(ConnectionString)) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public class GetMysqlBinaryTypesRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task GetMysqlBinaryTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } - } + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; + public class GetMysqlBinaryTypesCntRow { - public string Name { get; set; } + public long Cnt { get; set; } + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task GetMysqlBinaryTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlBinaryTypesCntSql, transaction: this.Transaction); } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public class CreateExtendedBioArgs - { - public string AuthorName { get; set; } - public string Name { get; set; } - public BiosBioType? 
BioType { get; set; } - public HashSet AuthorType { get; set; } - }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() { - var queryParams = new Dictionary(); - queryParams.Add("author_name", args.AuthorName); - queryParams.Add("name", args.Name); - queryParams.Add("bio_type", args.BioType); - queryParams.Add("author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : null); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + await connection.ExecuteAsync(TruncateMysqlBinaryTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateMysqlBinaryTypesSql, transaction: this.Transaction); } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow - { - public string AuthorName { get; set; } - public string Name { get; set; } - public BiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } - }; - public class GetFirstExtendedBioByTypeArgs + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; + public class GetMysqlFunctionsRow { - public BiosBioType? BioType { get; set; } + public int? 
MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + public async Task GetMysqlFunctions() { - var queryParams = new Dictionary(); - queryParams.Add("bio_type", args.BioType); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); - } - - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateExtendedBiosSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetMysqlFunctionsSql, transaction: this.Transaction); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorDapperLegacyExample/Utils.cs b/examples/MySqlConnectorDapperLegacyExample/Utils.cs index f377d68b..47c4f482 100644 --- a/examples/MySqlConnectorDapperLegacyExample/Utils.cs +++ 
b/examples/MySqlConnectorDapperLegacyExample/Utils.cs @@ -30,8 +30,8 @@ public override void SetValue(IDbDataParameter parameter, JsonElement value) public static void ConfigureSqlMapper() { SqlMapper.AddTypeHandler(typeof(JsonElement), new JsonElementTypeHandler()); - SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); SqlMapper.AddTypeHandler(typeof(HashSet), new BiosAuthorTypeTypeHandler()); + SqlMapper.AddTypeHandler(typeof(HashSet), new MysqlStringTypesCSetTypeHandler()); } public static string TransformQueryForSliceArgs(string originalSql, int sliceSize, string paramName) @@ -40,31 +40,31 @@ public static string TransformQueryForSliceArgs(string originalSql, int sliceSiz return originalSql.Replace($"/*SLICE:{paramName}*/@{paramName}", string.Join(",", paramArgs)); } - private class MysqlStringTypesCSetTypeHandler : SqlMapper.TypeHandler> + private class BiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToMysqlStringTypesCSetSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToBiosAuthorTypeSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } } - private class BiosAuthorTypeTypeHandler : SqlMapper.TypeHandler> + private class MysqlStringTypesCSetTypeHandler : SqlMapper.TypeHandler> { - public override HashSet Parse(object value) + public override HashSet Parse(object value) { if (value is string s) - return s.ToBiosAuthorTypeSet(); - throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); + return s.ToMysqlStringTypesCSetSet(); + throw new DataException($"Cannot convert {value?.GetType()} to HashSet"); } - public 
override void SetValue(IDbDataParameter parameter, HashSet value) + public override void SetValue(IDbDataParameter parameter, HashSet value) { parameter.Value = string.Join(",", value); } diff --git a/examples/MySqlConnectorDapperLegacyExample/request.json b/examples/MySqlConnectorDapperLegacyExample/request.json index 680cf0e0..f2f8ce44 100644 --- a/examples/MySqlConnectorDapperLegacyExample/request.json +++ b/examples/MySqlConnectorDapperLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/types/schema.sql", - "examples/config/mysql/authors/schema.sql" + "examples/config/mysql/authors/schema.sql", + "examples/config/mysql/types/schema.sql" ], "queries": [ - "examples/config/mysql/types/query.sql", - "examples/config/mysql/authors/query.sql" + "examples/config/mysql/authors/query.sql", + "examples/config/mysql/types/query.sql" ], "codegen": { "out": "examples/MySqlConnectorDapperLegacyExample", @@ -25,6 +25,95 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, 
+ "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "mysql_numeric_types" @@ -442,98 +531,25 @@ } } ] - }, + } + ], + "enums": [ { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } + "name": "bios_bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" ] }, { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } + "name": "bios_author_type", + "vals": [ + "Author", + "Editor", + "Translator" ] - } - ], - "enums": [ + }, { "name": "mysql_string_types_c_enum", "vals": [ @@ -549,22 +565,6 @@ "coffee", "milk" ] - }, - { - "name": "bios_bio_type", - "vals": [ - "Autobiography", - "Biography", - "Memoir" - ] - }, - { - "name": "bios_author_type", - "vals": [ - "Author", - "Editor", - "Translator" - ] } ] }, @@ -629,3145 +629,3145 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": 
"InsertMysqlNumericTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_bool", - "length": 1, + "name": "name", + "notNull": true, + "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - } - }, - { - "number": 2, - "column": { - "name": "c_boolean", - "length": 1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? 
OFFSET ?", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ { - "number": 3, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 4, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 5, - "column": { - "name": "c_mediumint", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 6, + "number": 1, "column": { - "name": "c_int", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, "type": { - "name": "int" - }, - "originalName": "c_int" + "name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_integer", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, "type": { - "name": "int" - }, - "originalName": "c_integer" + "name": "integer" + } } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 8, + "number": 1, "column": { - "name": "c_bigint", + "name": "id", + 
"notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" - } - }, - { - "number": 9, - "column": { - "name": "c_decimal", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - } - }, - { - "number": 10, - "column": { - "name": "c_dec", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - } - }, - { - "number": 11, - "column": { - "name": "c_numeric", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" + "originalName": "id" } }, { - "number": 12, + "number": 2, "column": { - "name": "c_fixed", - "length": 10, + "name": "name", + "notNull": true, + "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_fixed" + "originalName": "name" } }, { - "number": 13, + "number": 3, "column": { - "name": "c_float", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "float" + "name": "text" }, - "originalName": "c_float" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "parameters": [ { - "number": 14, + "number": 1, "column": { - "name": "c_double", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double" + 
"originalName": "name" } }, { - "number": 15, + "number": 2, "column": { - "name": "c_double_precision", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "bio" } } ], - "comments": [ - " Numeric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_numeric_types" + "name": "authors" } }, { - "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlNumericTypesBatch", - "cmd": ":copyfrom", + "text": "SELECT id, name, bio FROM authors WHERE id = ? LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_bool", - "length": 1, + "name": "id", + "notNull": true, + "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "tinyint" + "name": "bigint" }, - "originalName": "c_bool" + "originalName": "id" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - 
"name": "c_boolean", - "length": 1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 3, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 4, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_smallint", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "smallint" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_mediumint", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "mediumint" + "name": "text" }, - "originalName": "c_mediumint" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 6, + "number": 1, 
"column": { - "name": "c_int", + "name": "bio", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "bio" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_bigint", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" + "isSqlcSlice": true, + "originalName": "id" } - }, - { - "number": 9, - "column": { - "name": "c_float", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" - } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "number": 10, - "column": { - "name": "c_numeric", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 11, - "column": { - "name": "c_decimal", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 12, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_dec", - "length": 10, + "name": "ids", + "notNull": true, + "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "bigint" }, - "originalName": "c_dec" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 13, + "number": 2, "column": { - "name": "c_fixed", - "length": 10, + "name": "names", + "notNull": true, + "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_fixed" + "isSqlcSlice": true, + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", + "name": "CreateBook", + "cmd": ":execlastid", + "parameters": [ { - "number": 14, + "number": 1, "column": { - "name": "c_double", + "name": 
"name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "books" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double" + "originalName": "name" } }, { - "number": 15, + "number": 2, "column": { - "name": "c_double_precision", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "books" }, "type": { - "name": "double" + "name": "bigint" }, - "originalName": "c_double_precision" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_numeric_types" + "name": "books" } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", - "name": "GetMysqlNumericTypes", - "cmd": ":one", + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": 
"GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_smallint", + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "smallint" + "name": "bigint" }, - "originalName": "c_smallint" + "originalName": "id" }, { - "name": "c_mediumint", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "mediumint" + "name": "text" }, - "originalName": "c_mediumint" + "originalName": "name" }, { - "name": "c_int", + "name": "bio", "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "bio" }, { - "name": "c_integer", + "name": "books", "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ { - "name": "c_bigint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "bigint" - }, - "originalName": "c_bigint" - }, + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" 
+ }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ { - "name": "c_float", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" + "number": 1, + "column": { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + } }, { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + } }, { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" + "number": 3, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } }, { - "name": "c_numeric", - "length": 10, + "number": 4, + "column": { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? 
LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "length": 100, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "decimal" + "name": "varchar" }, - "originalName": "c_numeric" + "originalName": "author_name" }, { - "name": "c_fixed", - "length": 10, + "name": "name", + "length": 100, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "decimal" + "name": "varchar" }, - "originalName": "c_fixed" + "originalName": "name" }, { - "name": "c_double", - "length": -1, + "name": "bio_type", + "length": 13, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "double" + "name": "bios_bio_type" }, - "originalName": "c_double" + "originalName": "bio_type" }, { - "name": "c_double_precision", - "length": -1, + "name": "author_type", + "length": 24, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "double" + "name": "bios_author_type" }, - "originalName": "c_double_precision" + "originalName": "author_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nLIMIT 1", - "name": "GetMysqlNumericTypesCnt", - "cmd": ":one", - "columns": [ + 
"text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", + "cmd": ":exec", + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": "c_bool", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" } }, { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" - }, - { - "name": "c_int", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_int" - }, - { - "name": "c_integer", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - }, - { - "name": "c_bigint", - "length": -1, - 
"table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "bigint" - }, - "originalName": "c_bigint" - }, - { - "name": "c_float", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" - }, - { - "name": "c_numeric", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" - }, - { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - }, - { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - }, - { - "name": "c_fixed", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_fixed" - }, - { - "name": "c_double", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double" - }, - { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double_precision" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE mysql_numeric_types", - "name": "TruncateMysqlNumericTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlStringTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - 
"number": 2, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "number": 2, + "column": { + "name": "c_boolean", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + } }, { "number": 3, "column": { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" } }, { "number": 4, "column": { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 5, "column": { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": "c_tinytext" + "originalName": "c_mediumint" } }, { "number": 6, "column": { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" } }, { "number": 7, "column": { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "c_text" + "originalName": "c_integer" } }, { "number": 8, "column": { - 
"name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + "name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" } }, { "number": 9, "column": { - "name": "c_json", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json" + "originalName": "c_decimal" } }, { "number": 10, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 11, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 12, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "comments": [ - " String types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_string_types" - } - }, - { - "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 2, - 
"column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 3, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 4, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 5, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 6, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 7, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 8, - "column": { - "name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 9, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 10, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 11, - 
"column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 12, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_string_types" - } - }, - { - "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", - "name": "GetMysqlStringTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - }, - { - "name": "c_nchar", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - }, - { - "name": "c_national_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_tinytext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - }, - { - "name": "c_mediumtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, - { - "name": "c_longtext", - "length": -1, - "table": { - 
"name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - }, - { - "name": "c_set", - "length": 15, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - }, - { - "name": "c_nchar", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - }, - { - "name": "c_national_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - "name": 
"mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_tinytext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - }, - { - "name": "c_mediumtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" + "column": { + "name": "c_dec", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" + } }, { - "name": "c_longtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" + "number": 11, + "column": { + "name": "c_numeric", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_numeric" + } }, { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" + "number": 12, + "column": { + "name": "c_fixed", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + } }, { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" + "number": 13, + "column": { + "name": "c_float", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + } }, { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_string_types" - }, - "type": { 
- "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" + "number": 14, + "column": { + "name": "c_double", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + } }, { - "name": "c_set", - "length": 15, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" + "number": 15, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" + } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE mysql_string_types", - "name": "TruncateMysqlStringTypes", - "cmd": ":exec", - "filename": "query.sql" + "comments": [ + " Numeric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", - "name": "InsertMysqlDatetimeTypes", - "cmd": ":exec", + "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_year", - "length": -1, + "name": "c_bool", + "length": 1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "tinyint" }, - "originalName": "c_year" + "originalName": "c_bool" } }, { "number": 2, "column": { - "name": "c_date", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { "schema": "public", - 
"name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "tinyint" }, - "originalName": "c_date" + "originalName": "c_boolean" } }, { "number": 3, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "tinyint" }, - "originalName": "c_datetime" + "originalName": "c_tinyint" } }, { "number": 4, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_smallint", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "smallint" }, - "originalName": "c_timestamp" + "originalName": "c_smallint" } }, { "number": 5, "column": { - "name": "c_time", + "name": "c_mediumint", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" + } + }, + { + "number": 6, + "column": { + "name": "c_int", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" + } + }, + { + "number": 7, + "column": { + "name": "c_integer", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" + } + }, + { + "number": 8, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" + } + }, + { + "number": 9, + "column": { + "name": "c_float", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + } + }, + { + "number": 10, + "column": { + "name": "c_numeric", "length": 10, 
"table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "decimal" }, - "originalName": "c_time" + "originalName": "c_numeric" } - } - ], - "comments": [ - " Datetime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_datetime_types" - } - }, - { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", - "name": "InsertMysqlDatetimeTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 11, "column": { - "name": "c_year", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "decimal" }, - "originalName": "c_year" + "originalName": "c_decimal" } }, { - "number": 2, + "number": 12, "column": { - "name": "c_date", - "length": -1, + "name": "c_dec", + "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_dec" } }, { - "number": 3, + "number": 13, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_fixed", + "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_fixed" } }, { - "number": 4, + "number": 14, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_double", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "double" }, - "originalName": "c_timestamp" + "originalName": "c_double" } }, { - "number": 5, + "number": 15, "column": { - "name": "c_time", - "length": 10, + 
"name": "c_double_precision", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "double" }, - "originalName": "c_time" + "originalName": "c_double_precision" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" } }, { - "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", - "name": "GetMysqlDatetimeTypes", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" + }, + { + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + }, + { + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" + }, + { + "name": "c_smallint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" + }, + { + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" + }, + { + "name": "c_int", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" + }, + { + "name": "c_integer", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" + }, + { + "name": "c_bigint", + "length": -1, + "table": { + 
"name": "mysql_numeric_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" + }, + { + "name": "c_float", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + }, + { + "name": "c_decimal", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_decimal" + }, + { + "name": "c_dec", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" + }, + { + "name": "c_numeric", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_numeric" + }, + { + "name": "c_fixed", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + }, + { + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + }, + { + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_year", + "name": "cnt", + "notNull": true, "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + "length": 1, 
"table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "tinyint" }, - "originalName": "c_year" + "originalName": "c_bool" }, { - "name": "c_date", + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + }, + { + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" + }, + { + "name": "c_smallint", "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "smallint" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_datetime", - "length": 19, + "name": "c_mediumint", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "mediumint" }, - "originalName": "c_datetime" + "originalName": "c_mediumint" }, { - "name": "c_timestamp", - "length": 19, + "name": "c_int", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "int" }, - "originalName": "c_timestamp" + "originalName": "c_int" }, { - "name": "c_time", - "length": 10, + "name": "c_integer", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "int" }, - "originalName": "c_time" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", - "name": "GetMysqlDatetimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_integer" + }, { - "name": "cnt", - "notNull": true, + "name": "c_bigint", "length": -1, - "isFuncCall": true, + 
"table": { + "name": "mysql_numeric_types" + }, "type": { "name": "bigint" - } + }, + "originalName": "c_bigint" }, { - "name": "c_year", + "name": "c_float", "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "float" }, - "originalName": "c_year" + "originalName": "c_float" }, { - "name": "c_date", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_datetime", - "length": 19, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_decimal" }, { - "name": "c_timestamp", - "length": 19, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "decimal" }, - "originalName": "c_timestamp" + "originalName": "c_dec" }, { - "name": "c_time", + "name": "c_fixed", "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "decimal" }, - "originalName": "c_time" + "originalName": "c_fixed" + }, + { + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + }, + { + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_datetime_types", - "name": "TruncateMysqlDatetimeTypes", + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", "cmd": ":exec", "filename": 
"query.sql" }, { - "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlBinaryTypes", + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bit", - "length": 8, + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" } }, { - "number": 2, + "number": 7, 
"column": { - "name": "c_binary", - "length": 3, + "name": "c_text", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "c_text" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_varbinary", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "longtext" }, - "originalName": "c_varbinary" + "originalName": "c_longtext" } }, { - "number": 4, + "number": 9, "column": { - "name": "c_tinyblob", + "name": "c_json", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "json" }, - "originalName": "c_tinyblob" + "originalName": "c_json" } }, { - "number": 5, + "number": 10, "column": { - "name": "c_blob", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "json" }, - "originalName": "c_blob" + "originalName": "c_json_string_override" } }, { - "number": 6, + "number": 11, "column": { - "name": "c_mediumblob", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_mediumblob" + "originalName": "c_enum" } }, { - "number": 7, + "number": 12, "column": { - "name": "c_longblob", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "mysql_string_types_c_set" }, - "originalName": "c_longblob" + "originalName": "c_set" } } ], 
"comments": [ - " Binary types " + " String types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" } }, { - "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlBinaryTypesBatch", + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_bit", - "length": 8, + "name": "c_char", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "char" }, - "originalName": "c_bit" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_binary", - "length": 3, + "name": "c_nchar", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "char" }, - "originalName": "c_binary" + "originalName": "c_nchar" } }, { "number": 3, "column": { - "name": "c_varbinary", - "length": 10, + "name": "c_national_char", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_national_char" } }, { "number": 4, "column": { - "name": "c_tinyblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "varchar" }, - "originalName": "c_tinyblob" + "originalName": "c_varchar" } }, { "number": 
5, "column": { - "name": "c_blob", + "name": "c_tinytext", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "tinytext" }, - "originalName": "c_blob" + "originalName": "c_tinytext" } }, { "number": 6, "column": { - "name": "c_mediumblob", + "name": "c_mediumtext", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mediumtext" }, - "originalName": "c_mediumblob" + "originalName": "c_mediumtext" } }, { "number": 7, "column": { - "name": "c_longblob", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "text" }, - "originalName": "c_longblob" + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + "number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + 
}, + "originalName": "c_set" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" } }, { - "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", - "name": "GetMysqlBinaryTypes", + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", "cmd": ":one", "columns": [ - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_binary_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_binary_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, + { + "name": "c_char", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_char" }, { - "name": "c_tinyblob", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "char" }, - "originalName": "c_tinyblob" + "originalName": "c_nchar" }, { - "name": "c_blob", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "char" }, - "originalName": "c_blob" + "originalName": "c_national_char" }, { - "name": "c_mediumblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "varchar" }, - "originalName": "c_mediumblob" + "originalName": "c_varchar" }, { - "name": "c_longblob", + "name": "c_tinytext", "length": -1, 
"table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "tinytext" }, - "originalName": "c_longblob" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", - "name": "GetMysqlBinaryTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + "originalName": "c_tinytext" }, { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" }, { - "name": "c_binary", - "length": 3, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "c_text" }, { - "name": "c_varbinary", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "longtext" }, - "originalName": "c_varbinary" + "originalName": "c_longtext" }, { - "name": "c_tinyblob", + "name": "c_json", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "json" }, - "originalName": "c_tinyblob" + "originalName": "c_json" }, { - "name": "c_blob", + "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "json" }, - "originalName": "c_blob" + "originalName": 
"c_json_string_override" }, { - "name": "c_mediumblob", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_mediumblob" + "originalName": "c_enum" }, { - "name": "c_longblob", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "mysql_string_types_c_set" }, - "originalName": "c_longblob" + "originalName": "c_set" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_binary_types", - "name": "TruncateMysqlBinaryTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_char", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_string_types" + }, "type": { - "name": "any" - } + "name": "char" + }, + "originalName": "c_char" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_nchar", "length": -1, - "isFuncCall": true, + "table": { + "name": 
"mysql_string_types" + }, "type": { - "name": "any" - } - } - ], - "comments": [ - " Functions " - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ + "name": "char" + }, + "originalName": "c_nchar" + }, { - "name": "id", - "notNull": true, + "name": "c_national_char", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "char" }, - "originalName": "id" + "originalName": "c_national_char" }, { - "name": "name", - "notNull": true, + "name": "c_varchar", + "length": 100, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_tinytext", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "tinytext" }, - "originalName": "name" + "originalName": "c_tinytext" }, { - "name": "bio", + "name": "c_mediumtext", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "mysql_string_types" }, "type": { "name": "text" }, - "originalName": "bio" - } - ], - "parameters": [ + "originalName": "c_text" + }, { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? 
OFFSET ?", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ + "name": "c_longtext", + "length": -1, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + }, { - "name": "id", - "notNull": true, + "name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": "c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "mysql_string_types_c_enum" }, - "originalName": "bio" + "originalName": "c_enum" + }, + { + "name": "c_set", + "length": 15, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "limit", - "notNull": true, + "name": "c_year", "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, "type": { - "name": "integer" - } + "name": "year" + }, + "originalName": "c_year" } }, { "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_date", "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, "type": { 
- "name": "integer" - } + "name": "date" + }, + "originalName": "c_date" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "bigint" + "name": "datetime" }, - "originalName": "id" + "originalName": "c_datetime" } }, { - "number": 2, + "number": 4, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } }, { - "number": 3, + "number": 5, "column": { - "name": "bio", - "length": -1, + "name": "c_time", + "length": 10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "time" }, - "originalName": "bio" + "originalName": "c_time" } } ], + "comments": [ + " Datetime types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "mysql_datetime_types" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_year", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "year" }, - "originalName": "name" + "originalName": "c_year" } 
}, { "number": 2, "column": { - "name": "bio", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "bio" + "originalName": "c_date" + } + }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_year", + "length": -1, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + }, + { + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": 
"mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } + ], + "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ? LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { "name": "bigint" - }, - "originalName": "id" + } }, { - "name": "name", - "notNull": true, + "name": "c_year", "length": -1, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "year" }, - "originalName": "name" + "originalName": "c_year" }, { - "name": "bio", + "name": "c_date", "length": -1, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_date" + }, { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_datetime", + "length": 19, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "bigint" + "name": "datetime" }, - "originalName": "id" + "originalName": "c_datetime" }, { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_timestamp", + "length": 19, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - 
"name": "text" + "name": "timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" }, { - "name": "bio", - "length": -1, + "name": "c_time", + "length": 10, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "time" }, - "originalName": "bio" + "originalName": "c_time" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, + "name": "c_bit", + "length": 8, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "bit" }, - "originalName": "name" + "originalName": "c_bit" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_binary", + "length": 3, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "binary" }, - "originalName": "name" + "originalName": "c_binary" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", - "length": -1, - "isNamedParam": true, + "name": "c_varbinary", + "length": 
10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "varbinary" }, - "originalName": "bio" + "originalName": "c_varbinary" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, { - "number": 1, + "number": 7, "column": { - "name": "ids", - "notNull": true, + "name": "c_longblob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "longblob" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_longblob" } } ], - "filename": "query.sql" + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": 
"mysql_binary_types" + } }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, { - "number": 1, + "number": 4, "column": { - "name": "ids", - "notNull": true, + "name": "c_tinyblob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "tinyblob" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_tinyblob" } }, { - "number": 2, + "number": 5, "column": { - "name": "names", - "notNull": true, 
+ "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "blob" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", - "name": "CreateBook", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "mediumblob" }, - "originalName": "name" + "originalName": "c_mediumblob" } }, { - "number": 2, + "number": 7, "column": { - "name": "author_id", - "notNull": true, + "name": "c_longblob", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "longblob" }, - "originalName": "author_id" + "originalName": "c_longblob" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "mysql_binary_types" } }, { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", "columns": [ { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": 
"query.sql" - }, - { - "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, { - "name": "id", - "notNull": true, + "name": "c_tinyblob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "tinyblob" }, - "originalName": "id" + "originalName": "c_tinyblob" }, { - "name": "name", - "notNull": true, + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "blob" }, - "originalName": "name" + "originalName": "c_blob" }, { - "name": "bio", + "name": "c_mediumblob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "mediumblob" }, - "originalName": "bio" + "originalName": "c_mediumblob" }, { - "name": "books", + "name": "c_longblob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "parameters": [ - { - 
"number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" } }, { - "number": 2, - "column": { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - } + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" }, { - "number": 3, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" }, { - "number": 4, - "column": { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": 
"bios_author_type" - }, - "originalName": "author_type" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "schema": "extended", - "name": "bios" - } - }, - { - "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? LIMIT 1", - "name": "GetFirstExtendedBioByType", - "cmd": ":one", - "columns": [ - { - "name": "author_name", - "length": 100, + "name": "c_varbinary", + "length": 10, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "varchar" + "name": "varbinary" }, - "originalName": "author_name" + "originalName": "c_varbinary" }, { - "name": "name", - "length": 100, + "name": "c_tinyblob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "varchar" + "name": "tinyblob" }, - "originalName": "name" + "originalName": "c_tinyblob" }, { - "name": "bio_type", - "length": 13, + "name": "c_blob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "bios_bio_type" + "name": "blob" }, - "originalName": "bio_type" + "originalName": "c_blob" }, { - "name": "author_type", - "length": 24, + "name": "c_mediumblob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "bios_author_type" + "name": "mediumblob" }, - "originalName": "author_type" - } - ], - "parameters": [ + "originalName": "c_mediumblob" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE extended.bios", - "name": 
"TruncateExtendedBios", + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorDapperLegacyExample/request.message b/examples/MySqlConnectorDapperLegacyExample/request.message index 246e45d1020683ecc7f60136349900b3102bfb7b..31569fb1374ec2341951399eee6d8bd1f52aef68 100644 GIT binary patch delta 109 zcmdmSl5xjLMt-hlHZDdZA=cc=;=&v$jfsh36X$X86enk-<|gWagw!S`%1qSeVp38E z%Lz`5SKqikk7@I6ZX-q(sr77JlMgD3Z8l?W;h5~pn=<(wPr~Gly!SV+^FJW8xt~wY KadTdh7ZU)Rj3qY! delta 100 zcmdmSl5xjLMt-hlHZDdZA=cc=;=&v$wTX#h6Bn@Y6enk-<|gWagfu25%1zYeVp38C z%kfW)*W9>1k7;uN%Uq7lGR($|lLL4YC%@xKp1hIw{^meMS;x)&d? CSet); public readonly record struct MysqlDatetimeType(short? CYear, DateTime? CDate, DateTime? CDatetime, DateTime? CTimestamp, TimeSpan? CTime); public readonly record struct MysqlBinaryType(byte? CBit, byte[]? CBinary, byte[]? CVarbinary, byte[]? CTinyblob, byte[]? CBlob, byte[]? CMediumblob, byte[]? CLongblob); -public readonly record struct Author(long Id, string Name, string? Bio); -public readonly record struct Book(long Id, string Name, long AuthorId, string? Description); public readonly record struct ExtendedBio(string? AuthorName, string? 
Name, BiosBioType? BioType, HashSet? AuthorType); -public enum MysqlStringTypesCEnum +public enum BiosBioType { Invalid = 0, // reserved for invalid enum value - Small = 1, - Medium = 2, - Big = 3 + Autobiography = 1, + Biography = 2, + Memoir = 3 } -public static class MysqlStringTypesCEnumExtensions +public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCEnum.Invalid, - ["small"] = MysqlStringTypesCEnum.Small, - ["medium"] = MysqlStringTypesCEnum.Medium, - ["big"] = MysqlStringTypesCEnum.Big + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCEnum.Invalid] = string.Empty, - [MysqlStringTypesCEnum.Small] = "small", - [MysqlStringTypesCEnum.Medium] = "medium", - [MysqlStringTypesCEnum.Big] = "big" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCEnum me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCEnumSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum MysqlStringTypesCSet +public enum BiosAuthorType { Invalid = 0, // reserved for 
invalid enum value - Tea = 1, - Coffee = 2, - Milk = 3 + Author = 1, + Editor = 2, + Translator = 3 } -public static class MysqlStringTypesCSetExtensions +public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCSet.Invalid, - ["tea"] = MysqlStringTypesCSet.Tea, - ["coffee"] = MysqlStringTypesCSet.Coffee, - ["milk"] = MysqlStringTypesCSet.Milk + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCSet.Invalid] = string.Empty, - [MysqlStringTypesCSet.Tea] = "tea", - [MysqlStringTypesCSet.Coffee] = "coffee", - [MysqlStringTypesCSet.Milk] = "milk" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCSet me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCSetSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum BiosBioType +public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value - Autobiography = 1, - Biography = 2, - Memoir = 3 + Small = 1, + Medium = 2, + Big = 3 } -public static class BiosBioTypeExtensions 
+public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosBioType.Invalid, - ["Autobiography"] = BiosBioType.Autobiography, - ["Biography"] = BiosBioType.Biography, - ["Memoir"] = BiosBioType.Memoir + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosBioType.Invalid] = string.Empty, - [BiosBioType.Autobiography] = "Autobiography", - [BiosBioType.Biography] = "Biography", - [BiosBioType.Memoir] = "Memoir" + [MysqlStringTypesCEnum.Invalid] = string.Empty, + [MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" }; - public static BiosBioType ToBiosBioType(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosBioType me) + public static string Stringify(this MysqlStringTypesCEnum me) { return EnumToString[me]; } - public static HashSet ToBiosBioTypeSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } -public enum BiosAuthorType +public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value - Author = 1, - Editor = 2, - Translator = 3 + Tea = 1, + Coffee = 2, + Milk = 3 } -public static class BiosAuthorTypeExtensions +public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static 
readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosAuthorType.Invalid, - ["Author"] = BiosAuthorType.Author, - ["Editor"] = BiosAuthorType.Editor, - ["Translator"] = BiosAuthorType.Translator + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosAuthorType.Invalid] = string.Empty, - [BiosAuthorType.Author] = "Author", - [BiosAuthorType.Editor] = "Editor", - [BiosAuthorType.Translator] = "Translator" + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" }; - public static BiosAuthorType ToBiosAuthorType(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosAuthorType me) + public static string Stringify(this MysqlStringTypesCSet me) { return EnumToString[me]; } - public static HashSet ToBiosAuthorTypeSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } \ No newline at end of file diff --git a/examples/MySqlConnectorExample/request.json b/examples/MySqlConnectorExample/request.json index 77aa8b66..cfae6b86 100644 --- a/examples/MySqlConnectorExample/request.json +++ b/examples/MySqlConnectorExample/request.json @@ -3,8 +3,8 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/types/schema.sql", - "examples/config/mysql/authors/schema.sql" + "examples/config/mysql/authors/schema.sql", + "examples/config/mysql/types/schema.sql" ], "queries": [ 
"examples/config/mysql/authors/query.sql", @@ -25,6 +25,95 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "mysql_numeric_types" @@ -442,98 +531,25 @@ } } ] - }, + } + ], + "enums": [ { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } + "name": "bios_bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" ] }, { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": 
"books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } + "name": "bios_author_type", + "vals": [ + "Author", + "Editor", + "Translator" ] - } - ], - "enums": [ + }, { "name": "mysql_string_types_c_enum", "vals": [ @@ -549,22 +565,6 @@ "coffee", "milk" ] - }, - { - "name": "bios_bio_type", - "vals": [ - "Autobiography", - "Biography", - "Memoir" - ] - }, - { - "name": "bios_author_type", - "vals": [ - "Author", - "Editor", - "Translator" - ] } ] }, diff --git a/examples/MySqlConnectorExample/request.message b/examples/MySqlConnectorExample/request.message index 04474b545b58bd50785deeacb30654ba021275c7..1d6789697eff1ede035d6bd055da556358e837ac 100644 GIT binary patch delta 74 zcmZoW$=G(1k)P`cD;J}Y5NmE_abb>>#>7N9p5o+;)Z9cp5MOO#qS(X*Y!kJ)Hpb6q d+B}Ebh;j2@rg|YzpXW)~Jc;*97yycv8SwxB delta 74 zcmZoW$=G(1k)P`cD;J}Y5NmE_abb>>+QdY$i3`|xijy-^a})JILK+hj AuthorType { get; set; } }; - public enum MysqlStringTypesCEnum + public enum BiosBioType { Invalid = 0, // reserved for invalid enum value - Small = 1, - Medium = 2, - Big = 3 + Autobiography = 1, + Biography = 2, + Memoir = 3 } - public static class MysqlStringTypesCEnumExtensions + public static class BiosBioTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCEnum.Invalid, - ["small"] = MysqlStringTypesCEnum.Small, - ["medium"] = MysqlStringTypesCEnum.Medium, - ["big"] = MysqlStringTypesCEnum.Big + [string.Empty] = BiosBioType.Invalid, + ["Autobiography"] = BiosBioType.Autobiography, + ["Biography"] = BiosBioType.Biography, + ["Memoir"] = BiosBioType.Memoir }; - private static readonly Dictionary EnumToString = new Dictionary() + private 
static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCEnum.Invalid] = string.Empty, - [MysqlStringTypesCEnum.Small] = "small", - [MysqlStringTypesCEnum.Medium] = "medium", - [MysqlStringTypesCEnum.Big] = "big" + [BiosBioType.Invalid] = string.Empty, + [BiosBioType.Autobiography] = "Autobiography", + [BiosBioType.Biography] = "Biography", + [BiosBioType.Memoir] = "Memoir" }; - public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) + public static BiosBioType ToBiosBioType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCEnum me) + public static string Stringify(this BiosBioType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCEnumSet(this string me) + public static HashSet ToBiosBioTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum MysqlStringTypesCSet + public enum BiosAuthorType { Invalid = 0, // reserved for invalid enum value - Tea = 1, - Coffee = 2, - Milk = 3 + Author = 1, + Editor = 2, + Translator = 3 } - public static class MysqlStringTypesCSetExtensions + public static class BiosAuthorTypeExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = MysqlStringTypesCSet.Invalid, - ["tea"] = MysqlStringTypesCSet.Tea, - ["coffee"] = MysqlStringTypesCSet.Coffee, - ["milk"] = MysqlStringTypesCSet.Milk + [string.Empty] = BiosAuthorType.Invalid, + ["Author"] = BiosAuthorType.Author, + ["Editor"] = BiosAuthorType.Editor, + ["Translator"] = BiosAuthorType.Translator }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [MysqlStringTypesCSet.Invalid] = string.Empty, - [MysqlStringTypesCSet.Tea] = "tea", 
- [MysqlStringTypesCSet.Coffee] = "coffee", - [MysqlStringTypesCSet.Milk] = "milk" + [BiosAuthorType.Invalid] = string.Empty, + [BiosAuthorType.Author] = "Author", + [BiosAuthorType.Editor] = "Editor", + [BiosAuthorType.Translator] = "Translator" }; - public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) + public static BiosAuthorType ToBiosAuthorType(this string me) { return StringToEnum[me]; } - public static string Stringify(this MysqlStringTypesCSet me) + public static string Stringify(this BiosAuthorType me) { return EnumToString[me]; } - public static HashSet ToMysqlStringTypesCSetSet(this string me) + public static HashSet ToBiosAuthorTypeSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum BiosBioType + public enum MysqlStringTypesCEnum { Invalid = 0, // reserved for invalid enum value - Autobiography = 1, - Biography = 2, - Memoir = 3 + Small = 1, + Medium = 2, + Big = 3 } - public static class BiosBioTypeExtensions + public static class MysqlStringTypesCEnumExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosBioType.Invalid, - ["Autobiography"] = BiosBioType.Autobiography, - ["Biography"] = BiosBioType.Biography, - ["Memoir"] = BiosBioType.Memoir + [string.Empty] = MysqlStringTypesCEnum.Invalid, + ["small"] = MysqlStringTypesCEnum.Small, + ["medium"] = MysqlStringTypesCEnum.Medium, + ["big"] = MysqlStringTypesCEnum.Big }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosBioType.Invalid] = string.Empty, - [BiosBioType.Autobiography] = "Autobiography", - [BiosBioType.Biography] = "Biography", - [BiosBioType.Memoir] = "Memoir" + [MysqlStringTypesCEnum.Invalid] = string.Empty, + 
[MysqlStringTypesCEnum.Small] = "small", + [MysqlStringTypesCEnum.Medium] = "medium", + [MysqlStringTypesCEnum.Big] = "big" }; - public static BiosBioType ToBiosBioType(this string me) + public static MysqlStringTypesCEnum ToMysqlStringTypesCEnum(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosBioType me) + public static string Stringify(this MysqlStringTypesCEnum me) { return EnumToString[me]; } - public static HashSet ToBiosBioTypeSet(this string me) + public static HashSet ToMysqlStringTypesCEnumSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } - public enum BiosAuthorType + public enum MysqlStringTypesCSet { Invalid = 0, // reserved for invalid enum value - Author = 1, - Editor = 2, - Translator = 3 + Tea = 1, + Coffee = 2, + Milk = 3 } - public static class BiosAuthorTypeExtensions + public static class MysqlStringTypesCSetExtensions { - private static readonly Dictionary StringToEnum = new Dictionary() + private static readonly Dictionary StringToEnum = new Dictionary() { - [string.Empty] = BiosAuthorType.Invalid, - ["Author"] = BiosAuthorType.Author, - ["Editor"] = BiosAuthorType.Editor, - ["Translator"] = BiosAuthorType.Translator + [string.Empty] = MysqlStringTypesCSet.Invalid, + ["tea"] = MysqlStringTypesCSet.Tea, + ["coffee"] = MysqlStringTypesCSet.Coffee, + ["milk"] = MysqlStringTypesCSet.Milk }; - private static readonly Dictionary EnumToString = new Dictionary() + private static readonly Dictionary EnumToString = new Dictionary() { - [BiosAuthorType.Invalid] = string.Empty, - [BiosAuthorType.Author] = "Author", - [BiosAuthorType.Editor] = "Editor", - [BiosAuthorType.Translator] = "Translator" + [MysqlStringTypesCSet.Invalid] = string.Empty, + [MysqlStringTypesCSet.Tea] = "tea", + [MysqlStringTypesCSet.Coffee] = "coffee", + [MysqlStringTypesCSet.Milk] = "milk" }; - public static 
BiosAuthorType ToBiosAuthorType(this string me) + public static MysqlStringTypesCSet ToMysqlStringTypesCSet(this string me) { return StringToEnum[me]; } - public static string Stringify(this BiosAuthorType me) + public static string Stringify(this MysqlStringTypesCSet me) { return EnumToString[me]; } - public static HashSet ToBiosAuthorTypeSet(this string me) + public static HashSet ToMysqlStringTypesCSetSet(this string me) { - return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); + return new HashSet(me.Split(',').ToList().Select(v => StringToEnum[v])); } } } \ No newline at end of file diff --git a/examples/MySqlConnectorLegacyExample/QuerySql.cs b/examples/MySqlConnectorLegacyExample/QuerySql.cs index 09d0d17d..98a4ae16 100644 --- a/examples/MySqlConnectorLegacyExample/QuerySql.cs +++ b/examples/MySqlConnectorLegacyExample/QuerySql.cs @@ -42,199 +42,36 @@ public static QuerySql WithTransaction(MySqlTransaction transaction) private MySqlTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; - public class InsertMysqlNumericTypesArgs - { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? 
CFloat { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } - }; - public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlNumericTypesSql, connection)) - { - command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertMysqlNumericTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - public class InsertMysqlNumericTypesBatchArgs + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; + public class GetAuthorRow { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? CDoublePrecision { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlNumericTypesBatch(List args) - { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) - { - var options = new TypeConverterOptions - { - Formats = new[] - { - supportedDateTimeFormat - } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - 
csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); - } - - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) - { - Local = true, - TableName = "mysql_numeric_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); - await loader.LoadAsync(); - await connection.CloseAsync(); - } - } - - private const string GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; - public class GetMysqlNumericTypesRow + public class GetAuthorArgs { - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } + public string Name { get; set; } }; - public async Task GetMysqlNumericTypes() + public async Task GetAuthor(GetAuthorArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlNumericTypesSql, connection)) + using (var command = new MySqlCommand(GetAuthorSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlNumericTypesRow + return new GetAuthorRow { - CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), - CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), - CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -248,29 +85,18 @@ public async Task GetMysqlNumericTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlNumericTypesSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlNumericTypesRow + return new GetAuthorRow { - CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), - CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), - CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), - CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -279,166 +105,77 @@ public async Task GetMysqlNumericTypes() return null; } - private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; - public class GetMysqlNumericTypesCntRow + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public class ListAuthorsRow { - public long Cnt { get; set; } - public bool? CBool { get; set; } - public bool? CBoolean { get; set; } - public short? CTinyint { get; set; } - public short? CSmallint { get; set; } - public int? CMediumint { get; set; } - public int? CInt { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public double? CFloat { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public decimal? CDec { get; set; } - public decimal? CFixed { get; set; } - public double? CDouble { get; set; } - public double? 
CDoublePrecision { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlNumericTypesCnt() + public class ListAuthorsArgs + { + public int Limit { get; set; } + public int Offset { get; set; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlNumericTypesCntSql, connection)) + using (var command = new MySqlCommand(ListAuthorsSql, connection)) { + command.Parameters.AddWithValue("@limit", args.Limit); + command.Parameters.AddWithValue("@offset", args.Offset); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlNumericTypesCntRow - { - Cnt = reader.GetInt64(0), - CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), - CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), - CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), - CNumeric = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? (double? 
)null : reader.GetDouble(15) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlNumericTypesCntSql; + command.CommandText = ListAuthorsSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@limit", args.Limit); + command.Parameters.AddWithValue("@offset", args.Offset); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlNumericTypesCntRow - { - Cnt = reader.GetInt64(0), - CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), - CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), - CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), - CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), - CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), - CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), - CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), - CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), - CNumeric = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), - CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), - CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), - CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), - CDouble = reader.IsDBNull(14) ? (double? 
)null : reader.GetDouble(14), - CDoublePrecision = reader.IsDBNull(15) ? (double? )null : reader.GetDouble(15) - }; - } - } - } - - return null; - } - - private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; - public async Task TruncateMysqlNumericTypes() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlNumericTypesSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncateMysqlNumericTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); } } - private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; - public class InsertMysqlStringTypesArgs + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; + public class CreateAuthorArgs { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - 
public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlStringTypesSql, connection)) + using (var command = new MySqlCommand(CreateAuthorSql, connection)) { - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? 
string.Join(",", args.CSet) : (object)DBNull.Value); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -450,130 +187,81 @@ public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlStringTypesSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_set", args.CSet != null ? string.Join(",", args.CSet) : (object)DBNull.Value); + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public class InsertMysqlStringTypesBatchArgs + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio)"; + public class CreateAuthorReturnIdArgs { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlStringTypesBatch(List args) + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] + await connection.OpenAsync(); + using (var command = new MySqlCommand(CreateAuthorReturnIdSql, connection)) { - supportedDateTimeFormat + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) - { - Local = true, - TableName = "mysql_string_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + command.CommandText = CreateAuthorReturnIdSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } - private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; - public class GetMysqlStringTypesRow + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow { - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? CEnum { get; set; } - public HashSet CSet { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlStringTypes() + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlStringTypesSql, connection)) + using (var command = new MySqlCommand(GetAuthorByIdSql, connection)) { + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlStringTypesRow + return new GetAuthorByIdRow { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), - CVarchar = reader.IsDBNull(3) ? 
null : reader.GetString(3), - CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), - CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), - CText = reader.IsDBNull(6) ? null : reader.GetString(6), - CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), - CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), - CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), - CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), - CSet = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCSetSet() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -587,26 +275,18 @@ public async Task GetMysqlStringTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlStringTypesSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlStringTypesRow + return new GetAuthorByIdRow { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), - CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), - CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), - CText = reader.IsDBNull(6) ? null : reader.GetString(6), - CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), - CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), - CJsonStringOverride = reader.IsDBNull(9) ? 
null : reader.GetString(9), - CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), - CSet = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCSetSet() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -615,103 +295,70 @@ public async Task GetMysqlStringTypes() return null; } - private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; - public class GetMysqlStringTypesCntRow + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public class GetAuthorByNamePatternRow { - public long Cnt { get; set; } - public string CChar { get; set; } - public string CNchar { get; set; } - public string CNationalChar { get; set; } - public string CVarchar { get; set; } - public string CTinytext { get; set; } - public string CMediumtext { get; set; } - public string CText { get; set; } - public string CLongtext { get; set; } - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public MysqlStringTypesCEnum? 
CEnum { get; set; } - public HashSet CSet { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlStringTypesCnt() + public class GetAuthorByNamePatternArgs + { + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlStringTypesCntSql, connection)) + using (var command = new MySqlCommand(GetAuthorByNamePatternSql, connection)) { + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlStringTypesCntRow - { - Cnt = reader.GetInt64(0), - CChar = reader.IsDBNull(1) ? null : reader.GetString(1), - CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), - CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), - CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), - CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), - CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), - CText = reader.IsDBNull(7) ? null : reader.GetString(7), - CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), - CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), - CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), - CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), - CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetMysqlStringTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetMysqlStringTypesCntRow - { - Cnt = reader.GetInt64(0), - CChar = reader.IsDBNull(1) ? null : reader.GetString(1), - CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), - CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), - CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), - CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), - CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), - CText = reader.IsDBNull(7) ? null : reader.GetString(7), - CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), - CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), - CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), - CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), - CSet = reader.IsDBNull(12) ? null : reader.GetString(12).ToMysqlStringTypesCSetSet() - }; + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } } } } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorByNamePatternSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } } - private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; - public async Task TruncateMysqlStringTypes() + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlStringTypesSql, connection)) + using (var command = new MySqlCommand(DeleteAuthorSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -723,35 +370,23 @@ public async Task TruncateMysqlStringTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlStringTypesSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", 
args.Name); await command.ExecuteNonQueryAsync(); } } - private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; - public class InsertMysqlDatetimeTypesArgs - { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } - }; - public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlDatetimeTypesSql, connection)) + using (var command = new MySqlCommand(DeleteAllAuthorsSql, connection)) { - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -763,440 +398,399 @@ public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlDatetimeTypesSql; + command.CommandText = DeleteAllAuthorsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date", args.CDate ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - public class InsertMysqlDatetimeTypesBatchArgs + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public class UpdateAuthorsArgs { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public string Bio { get; set; } }; - public async Task InsertMysqlDatetimeTypesBatch(List args) + public async Task UpdateAuthors(UpdateAuthorsArgs args) { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] + await connection.OpenAsync(); + using (var command = new MySqlCommand(UpdateAuthorsSql, connection)) { - supportedDateTimeFormat + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); - await csvWriter.WriteRecordsAsync(args); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) - { - Local = true, - TableName = "mysql_datetime_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + command.CommandText = UpdateAuthorsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } } - private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; - public class GetMysqlDatetimeTypesRow + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; + public class GetAuthorsByIdsRow { - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? 
CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlDatetimeTypes() + public class GetAuthorsByIdsArgs + { + public long[] Ids { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { + var transformedSql = GetAuthorsByIdsSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlDatetimeTypesSql, connection)) + using (var command = new MySqlCommand(transformedSql, connection)) { + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlDatetimeTypesRow - { - CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), - CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTime = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlDatetimeTypesSql; + command.CommandText = transformedSql; command.Transaction = this.Transaction; + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlDatetimeTypesRow - { - CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), - CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTime = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; - public class GetMysqlDatetimeTypesCntRow + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; + public class GetAuthorsByIdsAndNamesRow { - public long Cnt { get; set; } - public short? CYear { get; set; } - public DateTime? CDate { get; set; } - public DateTime? CDatetime { get; set; } - public DateTime? 
CTimestamp { get; set; } - public TimeSpan? CTime { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetMysqlDatetimeTypesCnt() + public class GetAuthorsByIdsAndNamesArgs + { + public long[] Ids { get; set; } + public string[] Names { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { + var transformedSql = GetAuthorsByIdsAndNamesSql; + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); + transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlDatetimeTypesCntSql, connection)) + using (var command = new MySqlCommand(transformedSql, connection)) { + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlDatetimeTypesCntRow - { - Cnt = reader.GetInt64(0), - CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), - CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), - CTime = reader.IsDBNull(5) ? (TimeSpan? )null : reader.GetFieldValue(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlDatetimeTypesCntSql; + command.CommandText = transformedSql; command.Transaction = this.Transaction; + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlDatetimeTypesCntRow - { - Cnt = reader.GetInt64(0), - CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), - CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), - CTime = reader.IsDBNull(5) ? (TimeSpan? )null : reader.GetFieldValue(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; - public async Task TruncateMysqlDatetimeTypes() + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id)"; + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlDatetimeTypesSql, connection)) + using (var command = new MySqlCommand(CreateBookSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlDatetimeTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); await command.ExecuteNonQueryAsync(); + return command.LastInsertedId; } } - private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; - public class InsertMysqlBinaryTypesArgs + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, 
books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow { - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public Author Author { get; set; } + public Book Book { get; set; } }; - public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(InsertMysqlBinaryTypesSql, connection)) + using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) { - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertMysqlBinaryTypesSql; + command.CommandText = ListAllAuthorsBooksSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - public class InsertMysqlBinaryTypesBatchArgs + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public byte? 
CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task InsertMysqlBinaryTypesBatch(List args) + public async Task> GetDuplicateAuthors() { - const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; - var config = new CsvConfiguration(CultureInfo.CurrentCulture) - { - Delimiter = ",", - NewLine = "\n" - }; - var nullConverterFn = new Utils.NullToStringCsvConverter(); - using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) - using (var csvWriter = new CsvWriter(writer, config)) + if (this.Transaction == null) { - var options = new TypeConverterOptions + using (var connection = new MySqlConnection(ConnectionString)) { - Formats = new[] + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) { - supportedDateTimeFormat + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } } - }; - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); - csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); - await csvWriter.WriteRecordsAsync(args); + } } - using (var connection = new MySqlConnection(ConnectionString)) + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) { - await connection.OpenAsync(); - var loader = new MySqlBulkLoader(connection) + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) { - Local = true, - TableName = "mysql_binary_types", - FileName = "input.csv", - FieldTerminator = ",", - FieldQuotationCharacter = '"', - FieldQuotationOptional = true, - NumberOfLinesToSkip = 1, - LineTerminator = "\n" - }; - loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); - await loader.LoadAsync(); - await connection.CloseAsync(); + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } } } - private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; - public class GetMysqlBinaryTypesRow + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } }; - public async Task GetMysqlBinaryTypes() + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlBinaryTypesSql, connection)) + using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) { + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetMysqlBinaryTypesRow - { - CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), - CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CMediumblob = reader.IsDBNull(5) ? 
null : reader.GetFieldValue(5), - CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlBinaryTypesSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } + } + + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; + public class CreateExtendedBioArgs + { + public string AuthorName { get; set; } + public string Name { get; set; } + public BiosBioType? 
BioType { get; set; } + public HashSet AuthorType { get; set; } + }; + public async Task CreateExtendedBio(CreateExtendedBioArgs args) + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(CreateExtendedBioSql, connection)) { - return new GetMysqlBinaryTypesRow - { - CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), - CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; + command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateExtendedBioSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@bio_type", args.BioType ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } } - private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; - public class GetMysqlBinaryTypesCntRow + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow { - public long Cnt { get; set; } - public byte? CBit { get; set; } - public byte[] CBinary { get; set; } - public byte[] CVarbinary { get; set; } - public byte[] CTinyblob { get; set; } - public byte[] CBlob { get; set; } - public byte[] CMediumblob { get; set; } - public byte[] CLongblob { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public BiosBioType? BioType { get; set; } + public HashSet AuthorType { get; set; } }; - public async Task GetMysqlBinaryTypesCnt() + public class GetFirstExtendedBioByTypeArgs + { + public BiosBioType? BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlBinaryTypesCntSql, connection)) + using (var command = new MySqlCommand(GetFirstExtendedBioByTypeSql, connection)) { + command.Parameters.AddWithValue("@bio_type", args.BioType ?? 
(object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlBinaryTypesCntRow + return new GetFirstExtendedBioByTypeRow { - Cnt = reader.GetInt64(0), - CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), - CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), - CLongblob = reader.IsDBNull(7) ? null : reader.GetFieldValue(7) + AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), + Name = reader.IsDBNull(1) ? null : reader.GetString(1), + BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } @@ -1210,22 +804,19 @@ public async Task GetMysqlBinaryTypesCnt() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlBinaryTypesCntSql; + command.CommandText = GetFirstExtendedBioByTypeSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlBinaryTypesCntRow + return new GetFirstExtendedBioByTypeRow { - Cnt = reader.GetInt64(0), - CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), - CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CBlob = reader.IsDBNull(5) ? 
null : reader.GetFieldValue(5), - CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), - CLongblob = reader.IsDBNull(7) ? null : reader.GetFieldValue(7) + AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), + Name = reader.IsDBNull(1) ? null : reader.GetString(1), + BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), + AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() }; } } @@ -1234,15 +825,15 @@ public async Task GetMysqlBinaryTypesCnt() return null; } - private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; - public async Task TruncateMysqlBinaryTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateMysqlBinaryTypesSql, connection)) + using (var command = new MySqlCommand(TruncateExtendedBiosSql, connection)) { await command.ExecuteNonQueryAsync(); } @@ -1255,37 +846,205 @@ public async Task TruncateMysqlBinaryTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateMysqlBinaryTypesSql; + command.CommandText = TruncateExtendedBiosSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; - public class GetMysqlFunctionsRow + private const string InsertMysqlNumericTypesSql = " INSERT INTO mysql_numeric_types ( c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, 
c_decimal, c_dec, c_numeric, c_fixed, c_float, c_double, c_double_precision ) VALUES (@c_bool, @c_boolean, @c_tinyint, @c_smallint, @c_mediumint, @c_int, @c_integer, @c_bigint, @c_decimal, @c_dec, @c_numeric, @c_fixed, @c_float, @c_double, @c_double_precision)"; + public class InsertMysqlNumericTypesArgs { - public int? MaxInt { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CFloat { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } }; - public async Task GetMysqlFunctions() + public async Task InsertMysqlNumericTypes(InsertMysqlNumericTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) + using (var command = new MySqlCommand(InsertMysqlNumericTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_int", args.CInt ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertMysqlNumericTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_bool", args.CBool ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyint", args.CTinyint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumint", args.CMediumint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_int", args.CInt ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_dec", args.CDec ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_fixed", args.CFixed ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_float", args.CFloat ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double", args.CDouble ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + public class InsertMysqlNumericTypesBatchArgs + { + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? 
CDoublePrecision { get; set; } + }; + public async Task InsertMysqlNumericTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.BoolToBitCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_numeric_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bool", "c_boolean", "c_tinyint", "c_smallint", "c_mediumint", "c_int", "c_integer", "c_bigint", "c_float", "c_numeric", "c_decimal", "c_dec", "c_fixed", "c_double", "c_double_precision" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string 
GetMysqlNumericTypesSql = "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1"; + public class GetMysqlNumericTypesRow + { + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? CDoublePrecision { get; set; } + }; + public async Task GetMysqlNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new MySqlCommand(GetMysqlNumericTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlNumericTypesRow { - MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), + CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CBigint = reader.IsDBNull(7) ? (long? 
)null : reader.GetInt64(7), + CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDec = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), + CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14) }; } } @@ -1299,17 +1058,29 @@ public async Task GetMysqlFunctions() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetMysqlFunctionsSql; + command.CommandText = GetMysqlNumericTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetMysqlFunctionsRow + return new GetMysqlNumericTypesRow { - MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + CBool = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CTinyint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CSmallint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CMediumint = reader.IsDBNull(4) ? (int? )null : reader.GetInt32(4), + CInt = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInteger = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CBigint = reader.IsDBNull(7) ? (long? )null : reader.GetInt64(7), + CFloat = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CDecimal = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CDec = reader.IsDBNull(10) ? (decimal? 
)null : reader.GetDecimal(10), + CNumeric = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CFixed = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CDouble = reader.IsDBNull(13) ? (double? )null : reader.GetDouble(13), + CDoublePrecision = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14) }; } } @@ -1318,36 +1089,57 @@ public async Task GetMysqlFunctions() return null; } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorArgs + private const string GetMysqlNumericTypesCntSql = "SELECT COUNT(*) AS cnt, c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision FROM mysql_numeric_types GROUP BY c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_numeric, c_decimal, c_dec, c_fixed, c_double, c_double_precision LIMIT 1"; + public class GetMysqlNumericTypesCntRow { - public string Name { get; set; } + public long Cnt { get; set; } + public bool? CBool { get; set; } + public bool? CBoolean { get; set; } + public short? CTinyint { get; set; } + public short? CSmallint { get; set; } + public int? CMediumint { get; set; } + public int? CInt { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public double? CFloat { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CDec { get; set; } + public decimal? CFixed { get; set; } + public double? CDouble { get; set; } + public double? 
CDoublePrecision { get; set; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task GetMysqlNumericTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorSql, connection)) + using (var command = new MySqlCommand(GetMysqlNumericTypesCntSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow + return new GetMysqlNumericTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + Cnt = reader.GetInt64(0), + CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), + CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? (double? 
)null : reader.GetDouble(15) }; } } @@ -1361,18 +1153,30 @@ public async Task GetAuthor(GetAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; + command.CommandText = GetMysqlNumericTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow + return new GetMysqlNumericTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + Cnt = reader.GetInt64(0), + CBool = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CBoolean = reader.IsDBNull(2) ? (bool? )null : reader.GetBoolean(2), + CTinyint = reader.IsDBNull(3) ? (short? )null : reader.GetInt16(3), + CSmallint = reader.IsDBNull(4) ? (short? )null : reader.GetInt16(4), + CMediumint = reader.IsDBNull(5) ? (int? )null : reader.GetInt32(5), + CInt = reader.IsDBNull(6) ? (int? )null : reader.GetInt32(6), + CInteger = reader.IsDBNull(7) ? (int? )null : reader.GetInt32(7), + CBigint = reader.IsDBNull(8) ? (long? )null : reader.GetInt64(8), + CFloat = reader.IsDBNull(9) ? (double? )null : reader.GetDouble(9), + CNumeric = reader.IsDBNull(10) ? (decimal? )null : reader.GetDecimal(10), + CDecimal = reader.IsDBNull(11) ? (decimal? )null : reader.GetDecimal(11), + CDec = reader.IsDBNull(12) ? (decimal? )null : reader.GetDecimal(12), + CFixed = reader.IsDBNull(13) ? (decimal? )null : reader.GetDecimal(13), + CDouble = reader.IsDBNull(14) ? (double? )null : reader.GetDouble(14), + CDoublePrecision = reader.IsDBNull(15) ? (double? 
)null : reader.GetDouble(15) }; } } @@ -1381,77 +1185,70 @@ public async Task GetAuthor(GetAuthorArgs args) return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs - { - public int Limit { get; set; } - public int Offset { get; set; } - }; - public async Task> ListAuthors(ListAuthorsArgs args) + private const string TruncateMysqlNumericTypesSql = "TRUNCATE TABLE mysql_numeric_types"; + public async Task TruncateMysqlNumericTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAuthorsSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlNumericTypesSql, connection)) { - command.Parameters.AddWithValue("@limit", args.Limit); - command.Parameters.AddWithValue("@offset", args.Offset); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAuthorsSql; + command.CommandText = TruncateMysqlNumericTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@limit", args.Limit); - command.Parameters.AddWithValue("@offset", args.Offset); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio)"; - public class CreateAuthorArgs + private const string InsertMysqlStringTypesSql = " INSERT INTO mysql_string_types ( c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set ) VALUES (@c_char, @c_nchar, @c_national_char, @c_varchar, @c_tinytext, @c_mediumtext, @c_text, @c_longtext, @c_json, @c_json_string_override, @c_enum, @c_set)"; + public class InsertMysqlStringTypesArgs { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? 
CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task CreateAuthor(CreateAuthorArgs args) + public async Task InsertMysqlStringTypes(InsertMysqlStringTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateAuthorSql, connection)) + using (var command = new MySqlCommand(InsertMysqlStringTypesSql, connection)) { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? 
string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1463,81 +1260,216 @@ public async Task CreateAuthor(CreateAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorSql; + command.CommandText = InsertMysqlStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_nchar", args.CNchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_national_char", args.CNationalChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinytext", args.CTinytext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumtext", args.CMediumtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longtext", args.CLongtext ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json", args.CJson?.GetRawText() ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_set", args.CSet != null ? 
string.Join(",", args.CSet) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio)"; - public class CreateAuthorReturnIdArgs + public class InsertMysqlStringTypesBatchArgs { - public string Name { get; set; } - public string Bio { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } + }; + public async Task InsertMysqlStringTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter>(new Utils.MysqlStringTypesCSetCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + 
var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_string_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_char", "c_nchar", "c_national_char", "c_varchar", "c_tinytext", "c_mediumtext", "c_text", "c_longtext", "c_json", "c_json_string_override", "c_enum", "c_set" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlStringTypesSql = "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1"; + public class GetMysqlStringTypesRow + { + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task GetMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateAuthorReturnIdSql, connection)) + using (var command = new MySqlCommand(GetMysqlStringTypesSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorReturnIdSql; + command.CommandText = GetMysqlStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlStringTypesRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CNchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + CNationalChar = reader.IsDBNull(2) ? null : reader.GetString(2), + CVarchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CTinytext = reader.IsDBNull(4) ? null : reader.GetString(4), + CMediumtext = reader.IsDBNull(5) ? null : reader.GetString(5), + CText = reader.IsDBNull(6) ? null : reader.GetString(6), + CLongtext = reader.IsDBNull(7) ? null : reader.GetString(7), + CJson = reader.IsDBNull(8) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(8)), + CJsonStringOverride = reader.IsDBNull(9) ? null : reader.GetString(9), + CEnum = reader.IsDBNull(10) ? (MysqlStringTypesCEnum? )null : reader.GetString(10).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(11) ? null : reader.GetString(11).ToMysqlStringTypesCSetSet() + }; + } + } } + + return null; } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetMysqlStringTypesCntSql = "SELECT COUNT(*) AS cnt, c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types GROUP BY c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set LIMIT 1"; + public class GetMysqlStringTypesCntRow { - public long Id { get; set; } + public long Cnt { get; set; } + public string CChar { get; set; } + public string CNchar { get; set; } + public string CNationalChar { get; set; } + public string CVarchar { get; set; } + public string CTinytext { get; set; } + public string CMediumtext { get; set; } + public string CText { get; set; } + public string CLongtext { get; set; } + public JsonElement? 
CJson { get; set; } + public string CJsonStringOverride { get; set; } + public MysqlStringTypesCEnum? CEnum { get; set; } + public HashSet CSet { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetMysqlStringTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorByIdSql, connection)) + using (var command = new MySqlCommand(GetMysqlStringTypesCntSql, connection)) { - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetMysqlStringTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() }; } } @@ -1551,18 +1483,27 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = GetMysqlStringTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetMysqlStringTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + Cnt = reader.GetInt64(0), + CChar = reader.IsDBNull(1) ? null : reader.GetString(1), + CNchar = reader.IsDBNull(2) ? null : reader.GetString(2), + CNationalChar = reader.IsDBNull(3) ? null : reader.GetString(3), + CVarchar = reader.IsDBNull(4) ? null : reader.GetString(4), + CTinytext = reader.IsDBNull(5) ? null : reader.GetString(5), + CMediumtext = reader.IsDBNull(6) ? null : reader.GetString(6), + CText = reader.IsDBNull(7) ? null : reader.GetString(7), + CLongtext = reader.IsDBNull(8) ? null : reader.GetString(8), + CJson = reader.IsDBNull(9) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(9)), + CJsonStringOverride = reader.IsDBNull(10) ? null : reader.GetString(10), + CEnum = reader.IsDBNull(11) ? (MysqlStringTypesCEnum? )null : reader.GetString(11).ToMysqlStringTypesCEnum(), + CSet = reader.IsDBNull(12) ? 
null : reader.GetString(12).ToMysqlStringTypesCSetSet() }; } } @@ -1571,70 +1512,56 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs - { - public string NamePattern { get; set; } - }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncateMysqlStringTypesSql = "TRUNCATE TABLE mysql_string_types"; + public async Task TruncateMysqlStringTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorByNamePatternSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlStringTypesSql, connection)) { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByNamePatternSql; + command.CommandText = TruncateMysqlStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? 
(object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs + private const string InsertMysqlDatetimeTypesSql = " INSERT INTO mysql_datetime_types ( c_year, c_date, c_datetime, c_timestamp, c_time ) VALUES (@c_year, @c_date, @c_datetime, @c_timestamp, @c_time)"; + public class InsertMysqlDatetimeTypesArgs { - public string Name { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? CTime { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task InsertMysqlDatetimeTypes(InsertMysqlDatetimeTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(DeleteAuthorSql, connection)) + using (var command = new MySqlCommand(InsertMysqlDatetimeTypesSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1646,374 +1573,487 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = InsertMysqlDatetimeTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@c_year", args.CYear ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_datetime", args.CDatetime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() + public class InsertMysqlDatetimeTypesBatchArgs + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } + }; + public async Task InsertMysqlDatetimeTypesBatch(List args) + { + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) + { + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat + } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + csvWriter.Context.TypeConverterCache.AddConverter(nullConverterFn); + await csvWriter.WriteRecordsAsync(args); + } + + using (var connection = new MySqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) + { + Local = true, + TableName = "mysql_datetime_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_year", "c_date", "c_datetime", "c_timestamp", "c_time" }); + await loader.LoadAsync(); + await connection.CloseAsync(); + } + } + + private const string GetMysqlDatetimeTypesSql = "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1"; + public class GetMysqlDatetimeTypesRow + { + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } + }; + public async Task GetMysqlDatetimeTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(DeleteAllAuthorsSql, connection)) + using (var command = new MySqlCommand(GetMysqlDatetimeTypesSql, connection)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesRow + { + CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), + CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTime = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAllAuthorsSql; + command.CommandText = GetMysqlDatetimeTypesSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; - public class UpdateAuthorsArgs - { - public string Bio { get; set; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) + using (var reader = await command.ExecuteReaderAsync()) { - await connection.OpenAsync(); - using (var command = new MySqlCommand(UpdateAuthorsSql, connection)) + if (await reader.ReadAsync()) { - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + return new GetMysqlDatetimeTypesRow + { + CYear = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CDate = reader.IsDBNull(1) ? (DateTime? )null : reader.GetDateTime(1), + CDatetime = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestamp = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTime = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) + }; } } } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = UpdateAuthorsSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); - } + return null; } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids)"; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string GetMysqlDatetimeTypesCntSql = "SELECT COUNT(*) AS cnt, c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types GROUP BY c_year, c_date, c_datetime, c_timestamp, c_time LIMIT 1"; + public class GetMysqlDatetimeTypesCntRow { - public long[] Ids { get; set; } + public long Cnt { get; set; } + public short? CYear { get; set; } + public DateTime? CDate { get; set; } + public DateTime? CDatetime { get; set; } + public DateTime? CTimestamp { get; set; } + public TimeSpan? 
CTime { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetMysqlDatetimeTypesCnt() { - var transformedSql = GetAuthorsByIdsSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(transformedSql, connection)) + using (var command = new MySqlCommand(GetMysqlDatetimeTypesCntSql, connection)) { - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), + CTime = reader.IsDBNull(5) ? (TimeSpan? 
)null : reader.GetFieldValue(5) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = transformedSql; + command.CommandText = GetMysqlDatetimeTypesCntSql; command.Transaction = this.Transaction; - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlDatetimeTypesCntRow + { + Cnt = reader.GetInt64(0), + CYear = reader.IsDBNull(1) ? (short? )null : reader.GetInt16(1), + CDate = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CDatetime = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CTimestamp = reader.IsDBNull(4) ? (DateTime? )null : reader.GetDateTime(4), + CTime = reader.IsDBNull(5) ? (TimeSpan? 
)null : reader.GetFieldValue(5) + }; + } } } + + return null; } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/@ids) AND name IN (/*SLICE:names*/@names)"; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public long[] Ids { get; set; } - public string[] Names { get; set; } - }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncateMysqlDatetimeTypesSql = "TRUNCATE TABLE mysql_datetime_types"; + public async Task TruncateMysqlDatetimeTypes() { - var transformedSql = GetAuthorsByIdsAndNamesSql; - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Ids.Length, "ids"); - transformedSql = Utils.TransformQueryForSliceArgs(transformedSql, args.Names.Length, "names"); if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(transformedSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlDatetimeTypesSql, connection)) { - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = transformedSql; + command.CommandText = TruncateMysqlDatetimeTypesSql; command.Transaction = this.Transaction; - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id)"; - public class CreateBookArgs + private const string InsertMysqlBinaryTypesSql = " INSERT INTO mysql_binary_types ( c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob ) VALUES (@c_bit, @c_binary, @c_varbinary, @c_tinyblob, @c_blob, @c_mediumblob, @c_longblob)"; + public class InsertMysqlBinaryTypesArgs { - public string Name { get; set; } - public long AuthorId { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertMysqlBinaryTypes(InsertMysqlBinaryTypesArgs args) { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateBookSql, connection)) + using (var command = new MySqlCommand(InsertMysqlBinaryTypesSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = InsertMysqlBinaryTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_binary", args.CBinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varbinary", args.CVarbinary ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_tinyblob", args.CTinyblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_mediumblob", args.CMediumblob ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_longblob", args.CLongblob ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); - return command.LastInsertedId; } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + public class InsertMysqlBinaryTypesBatchArgs { - public Author Author { get; set; } - public Book Book { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task InsertMysqlBinaryTypesBatch(List args) { - if (this.Transaction == null) + const string supportedDateTimeFormat = "yyyy-MM-dd H:mm:ss"; + var config = new CsvConfiguration(CultureInfo.CurrentCulture) { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(ListAllAuthorsBooksSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + Delimiter = ",", + NewLine = "\n" + }; + var nullConverterFn = new Utils.NullToStringCsvConverter(); + using (var writer = new StreamWriter("input.csv", false, new UTF8Encoding(false))) + using (var csvWriter = new CsvWriter(writer, config)) + { + var options = new TypeConverterOptions + { + Formats = new[] + { + supportedDateTimeFormat } - } + }; + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterOptionsCache.AddOptions(options); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteCsvConverter()); + csvWriter.Context.TypeConverterCache.AddConverter(new Utils.ByteArrayCsvConverter()); + await csvWriter.WriteRecordsAsync(args); } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + using (var connection = new MySqlConnection(ConnectionString)) { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + await connection.OpenAsync(); + var loader = new MySqlBulkLoader(connection) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + Local = true, + TableName = "mysql_binary_types", + FileName = "input.csv", + FieldTerminator = ",", + FieldQuotationCharacter = '"', + FieldQuotationOptional = true, + NumberOfLinesToSkip = 1, + LineTerminator = "\n" + }; + loader.Columns.AddRange(new List { "c_bit", "c_binary", "c_varbinary", "c_tinyblob", "c_blob", "c_mediumblob", "c_longblob" }); + await loader.LoadAsync(); + await connection.CloseAsync(); } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string GetMysqlBinaryTypesSql = "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1"; + public class GetMysqlBinaryTypesRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public byte? CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task GetMysqlBinaryTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetDuplicateAuthorsSql, connection)) + using (var command = new MySqlCommand(GetMysqlBinaryTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetMysqlBinaryTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesRow + { + CBit = reader.IsDBNull(0) ? (byte? )null : reader.GetFieldValue(0), + CBinary = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CVarbinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CTinyblob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CBlob = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4), + CMediumblob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CLongblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs + private const string GetMysqlBinaryTypesCntSql = "SELECT COUNT(*) AS cnt, c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types GROUP BY c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob LIMIT 1"; + public class GetMysqlBinaryTypesCntRow { - public string Name { get; set; } + public long Cnt { get; set; } + public byte? 
CBit { get; set; } + public byte[] CBinary { get; set; } + public byte[] CVarbinary { get; set; } + public byte[] CTinyblob { get; set; } + public byte[] CBlob { get; set; } + public byte[] CMediumblob { get; set; } + public byte[] CLongblob { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task GetMysqlBinaryTypesCnt() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetAuthorsByBookNameSql, connection)) + using (var command = new MySqlCommand(GetMysqlBinaryTypesCntSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? 
null : reader.GetFieldValue(7) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = GetMysqlBinaryTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt64(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetMysqlBinaryTypesCntRow + { + Cnt = reader.GetInt64(0), + CBit = reader.IsDBNull(1) ? (byte? )null : reader.GetFieldValue(1), + CBinary = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CVarbinary = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CTinyblob = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CBlob = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CMediumblob = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CLongblob = reader.IsDBNull(7) ? null : reader.GetFieldValue(7) + }; + } } } + + return null; } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (@author_name, @name, @bio_type, @author_type)"; - public class CreateExtendedBioArgs - { - public string AuthorName { get; set; } - public string Name { get; set; } - public BiosBioType? 
BioType { get; set; } - public HashSet AuthorType { get; set; } - }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + private const string TruncateMysqlBinaryTypesSql = "TRUNCATE TABLE mysql_binary_types"; + public async Task TruncateMysqlBinaryTypes() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(CreateExtendedBioSql, connection)) + using (var command = new MySqlCommand(TruncateMysqlBinaryTypesSql, connection)) { - command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? string.Join(",", args.AuthorType) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -2025,48 +2065,37 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateExtendedBioSql; + command.CommandText = TruncateMysqlBinaryTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@author_name", args.AuthorName ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@name", args.Name ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@author_type", args.AuthorType != null ? 
string.Join(",", args.AuthorType) : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow - { - public string AuthorName { get; set; } - public string Name { get; set; } - public BiosBioType? BioType { get; set; } - public HashSet AuthorType { get; set; } - }; - public class GetFirstExtendedBioByTypeArgs + private const string GetMysqlFunctionsSql = " SELECT MAX(c_int) AS max_int, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM mysql_numeric_types CROSS JOIN mysql_string_types CROSS JOIN mysql_datetime_types"; + public class GetMysqlFunctionsRow { - public BiosBioType? BioType { get; set; } + public int? MaxInt { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + public async Task GetMysqlFunctions() { if (this.Transaction == null) { using (var connection = new MySqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new MySqlCommand(GetFirstExtendedBioByTypeSql, connection)) + using (var command = new MySqlCommand(GetMysqlFunctionsSql, connection)) { - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetMysqlFunctionsRow { - AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), - Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() + MaxInt = reader.IsDBNull(0) ? (int? 
)null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -2080,19 +2109,17 @@ public async Task GetFirstExtendedBioByType(GetFir throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetFirstExtendedBioByTypeSql; + command.CommandText = GetMysqlFunctionsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio_type", args.BioType ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetMysqlFunctionsRow { - AuthorName = reader.IsDBNull(0) ? null : reader.GetString(0), - Name = reader.IsDBNull(1) ? null : reader.GetString(1), - BioType = reader.IsDBNull(2) ? (BiosBioType? )null : reader.GetString(2).ToBiosBioType(), - AuthorType = reader.IsDBNull(3) ? null : reader.GetString(3).ToBiosAuthorTypeSet() + MaxInt = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -2100,32 +2127,5 @@ public async Task GetFirstExtendedBioByType(GetFir return null; } - - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() - { - if (this.Transaction == null) - { - using (var connection = new MySqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new MySqlCommand(TruncateExtendedBiosSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncateExtendedBiosSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } } } \ No newline at end of file diff --git a/examples/MySqlConnectorLegacyExample/request.json b/examples/MySqlConnectorLegacyExample/request.json index 4a6c34fe..350da4df 100644 --- a/examples/MySqlConnectorLegacyExample/request.json +++ b/examples/MySqlConnectorLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "mysql", "schema": [ - "examples/config/mysql/types/schema.sql", - "examples/config/mysql/authors/schema.sql" + "examples/config/mysql/authors/schema.sql", + "examples/config/mysql/types/schema.sql" ], "queries": [ - "examples/config/mysql/types/query.sql", - "examples/config/mysql/authors/query.sql" + "examples/config/mysql/authors/query.sql", + "examples/config/mysql/types/query.sql" ], "codegen": { "out": "examples/MySqlConnectorLegacyExample", @@ -25,6 +25,95 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { 
+ "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "bigint" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "mysql_numeric_types" @@ -442,98 +531,25 @@ } } ] - }, + } + ], + "enums": [ { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } + "name": "bios_bio_type", + "vals": [ + "Autobiography", + "Biography", + "Memoir" ] }, { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "bigint" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": 
"books" - }, - "type": { - "name": "text" - } - } + "name": "bios_author_type", + "vals": [ + "Author", + "Editor", + "Translator" ] - } - ], - "enums": [ + }, { "name": "mysql_string_types_c_enum", "vals": [ @@ -549,22 +565,6 @@ "coffee", "milk" ] - }, - { - "name": "bios_bio_type", - "vals": [ - "Autobiography", - "Biography", - "Memoir" - ] - }, - { - "name": "bios_author_type", - "vals": [ - "Author", - "Editor", - "Translator" - ] } ] }, @@ -629,3145 +629,3145 @@ }, "queries": [ { - "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlNumericTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_bool", - "length": 1, + "name": "name", + "notNull": true, + "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - } - }, - { - "number": 2, - "column": { - "name": "c_boolean", - "length": 1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "tinyint" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } - }, + } + ], + 
"filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? OFFSET ?", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ { - "number": 3, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 4, - "column": { - "name": "c_smallint", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 5, - "column": { - "name": "c_mediumint", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 6, + "number": 1, "column": { - "name": "c_int", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, "type": { - "name": "int" - }, - "originalName": "c_int" + "name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_integer", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, "type": { - "name": "int" - }, - "originalName": "c_integer" + "name": "integer" + } } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", + 
"parameters": [ { - "number": 8, + "number": 1, "column": { - "name": "c_bigint", + "name": "id", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" - } - }, - { - "number": 9, - "column": { - "name": "c_decimal", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - } - }, - { - "number": 10, - "column": { - "name": "c_dec", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - } - }, - { - "number": 11, - "column": { - "name": "c_numeric", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" + "originalName": "id" } }, { - "number": 12, + "number": 2, "column": { - "name": "c_fixed", - "length": 10, + "name": "name", + "notNull": true, + "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_fixed" + "originalName": "name" } }, { - "number": 13, + "number": 3, "column": { - "name": "c_float", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "float" + "name": "text" }, - "originalName": "c_float" + "originalName": "bio" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "parameters": [ { - "number": 14, + "number": 1, "column": { - "name": "c_double", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": 
"authors" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double" + "originalName": "name" } }, { - "number": 15, + "number": 2, "column": { - "name": "c_double_precision", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "bio" } } ], - "comments": [ - " Numeric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_numeric_types" + "name": "authors" } }, { - "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlNumericTypesBatch", - "cmd": ":copyfrom", + "text": "SELECT id, name, bio FROM authors WHERE id = ? 
LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_bool", - "length": 1, + "name": "id", + "notNull": true, + "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "tinyint" + "name": "bigint" }, - "originalName": "c_bool" + "originalName": "id" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ { - "number": 2, - "column": { - "name": "c_boolean", - "length": 1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 3, - "column": { - "name": "c_tinyint", - "length": 3, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 4, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": 
"c_smallint", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "smallint" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_mediumint", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "mediumint" + "name": "text" }, - "originalName": "c_mediumint" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 6, + "number": 1, "column": { - "name": "c_int", + "name": "bio", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "bio" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": 
{ + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_bigint", + "name": "ids", + "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { "name": "bigint" }, - "originalName": "c_bigint" + "isSqlcSlice": true, + "originalName": "id" } - }, - { - "number": 9, - "column": { - "name": "c_float", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" - } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "number": 10, - "column": { - "name": "c_numeric", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigint" + }, + "originalName": "id" }, { - "number": 11, - "column": { - "name": "c_decimal", - "length": 10, - "table": { - "schema": "public", - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 12, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_dec", - "length": 10, + "name": "ids", + "notNull": true, + 
"length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "bigint" }, - "originalName": "c_dec" + "isSqlcSlice": true, + "originalName": "id" } }, { - "number": 13, + "number": 2, "column": { - "name": "c_fixed", - "length": 10, + "name": "names", + "notNull": true, + "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "decimal" + "name": "text" }, - "originalName": "c_fixed" + "isSqlcSlice": true, + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", + "name": "CreateBook", + "cmd": ":execlastid", + "parameters": [ { - "number": 14, + "number": 1, "column": { - "name": "c_double", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "books" }, "type": { - "name": "double" + "name": "text" }, - "originalName": "c_double" + "originalName": "name" } }, { - "number": 15, + "number": 2, "column": { - "name": "c_double_precision", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "mysql_numeric_types" + "name": "books" }, "type": { - "name": "double" + "name": "bigint" }, - "originalName": "c_double_precision" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_numeric_types" + "name": "books" } }, { - "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", - "name": "GetMysqlNumericTypes", - "cmd": ":one", + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = 
books.author_id \nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", + "columns": [ { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_smallint", + "name": "authors", + "length": -1, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "smallint" + "name": "bigint" }, - "originalName": "c_smallint" + "originalName": "id" }, { - "name": "c_mediumint", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - 
"name": "mediumint" + "name": "text" }, - "originalName": "c_mediumint" + "originalName": "name" }, { - "name": "c_int", + "name": "bio", "length": -1, "table": { - "name": "mysql_numeric_types" + "name": "authors" }, "type": { - "name": "int" + "name": "text" }, - "originalName": "c_int" + "originalName": "bio" }, { - "name": "c_integer", + "name": "books", "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - }, + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ { - "name": "c_bigint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "bigint" - }, - "originalName": "c_bigint" - }, + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ { - "name": "c_float", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" + "number": 1, + "column": { + "name": "author_name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "author_name" + } }, { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + "length": 100, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "varchar" + }, + "originalName": "name" + } }, { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" + "number": 
3, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } }, { - "name": "c_numeric", - "length": 10, + "number": 4, + "column": { + "name": "author_type", + "length": 24, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "bios_author_type" + }, + "originalName": "author_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "length": 100, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "decimal" + "name": "varchar" }, - "originalName": "c_numeric" + "originalName": "author_name" }, { - "name": "c_fixed", - "length": 10, + "name": "name", + "length": 100, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "decimal" + "name": "varchar" }, - "originalName": "c_fixed" + "originalName": "name" }, { - "name": "c_double", - "length": -1, + "name": "bio_type", + "length": 13, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "double" + "name": "bios_bio_type" }, - "originalName": "c_double" + "originalName": "bio_type" }, { - "name": "c_double_precision", - "length": -1, + "name": "author_type", + "length": 24, "table": { - "name": "mysql_numeric_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "double" + "name": "bios_author_type" }, - "originalName": "c_double_precision" + "originalName": "author_type" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "bio_type", + "length": 13, + "table": { + "schema": "extended", + "name": 
"bios" + }, + "type": { + "name": "bios_bio_type" + }, + "originalName": "bio_type" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nLIMIT 1", - "name": "GetMysqlNumericTypesCnt", - "cmd": ":one", - "columns": [ + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_decimal, \n c_dec, \n c_numeric, \n c_fixed, \n c_float, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypes", + "cmd": ":exec", + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": "c_bool", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" } }, { - "name": "c_bool", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_bool" - }, - { - "name": "c_boolean", - "length": 1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_boolean" - }, - { - "name": "c_tinyint", - "length": 3, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "tinyint" - }, - "originalName": "c_tinyint" - }, - { - "name": "c_smallint", - "length": -1, 
- "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "smallint" - }, - "originalName": "c_smallint" - }, - { - "name": "c_mediumint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "mediumint" - }, - "originalName": "c_mediumint" - }, - { - "name": "c_int", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_int" - }, - { - "name": "c_integer", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "int" - }, - "originalName": "c_integer" - }, - { - "name": "c_bigint", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "bigint" - }, - "originalName": "c_bigint" - }, - { - "name": "c_float", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "float" - }, - "originalName": "c_float" - }, - { - "name": "c_numeric", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_numeric" - }, - { - "name": "c_decimal", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_decimal" - }, - { - "name": "c_dec", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_dec" - }, - { - "name": "c_fixed", - "length": 10, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "decimal" - }, - "originalName": "c_fixed" - }, - { - "name": "c_double", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double" - }, - { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "mysql_numeric_types" - }, - "type": { - "name": "double" - }, - "originalName": "c_double_precision" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE 
mysql_numeric_types", - "name": "TruncateMysqlNumericTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlStringTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 2, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } + "number": 2, + "column": { + "name": "c_boolean", + "length": 1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + } }, { "number": 3, "column": { - "name": "c_national_char", - "length": -1, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "char" + "name": "tinyint" }, - "originalName": "c_national_char" + "originalName": "c_tinyint" } }, { "number": 4, "column": { - "name": "c_varchar", - "length": 100, + "name": "c_smallint", + "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "varchar" + "name": "smallint" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 5, "column": { - "name": "c_tinytext", + "name": "c_mediumint", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "tinytext" + "name": "mediumint" }, - "originalName": 
"c_tinytext" + "originalName": "c_mediumint" } }, { "number": 6, "column": { - "name": "c_mediumtext", + "name": "c_int", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "mediumtext" + "name": "int" }, - "originalName": "c_mediumtext" + "originalName": "c_int" } }, { "number": 7, "column": { - "name": "c_text", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "text" + "name": "int" }, - "originalName": "c_text" + "originalName": "c_integer" } }, { "number": 8, "column": { - "name": "c_longtext", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "longtext" + "name": "bigint" }, - "originalName": "c_longtext" + "originalName": "c_bigint" } }, { "number": 9, "column": { - "name": "c_json", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", - "name": "mysql_string_types" + "name": "mysql_numeric_types" }, "type": { - "name": "json" + "name": "decimal" }, - "originalName": "c_json" + "originalName": "c_decimal" } }, { "number": 10, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 11, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 12, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "comments": [ - " String types " - ], - 
"filename": "query.sql", - "insert_into_table": { - "name": "mysql_string_types" - } - }, - { - "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - } - }, - { - "number": 2, - "column": { - "name": "c_nchar", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - } - }, - { - "number": 3, - "column": { - "name": "c_national_char", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - } - }, - { - "number": 4, - "column": { - "name": "c_varchar", - "length": 100, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - } - }, - { - "number": 5, - "column": { - "name": "c_tinytext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - } - }, - { - "number": 6, - "column": { - "name": "c_mediumtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - } - }, - { - "number": 7, - "column": { - "name": "c_text", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - }, - { - "number": 8, - "column": { - 
"name": "c_longtext", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - } - }, - { - "number": 9, - "column": { - "name": "c_json", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - } - }, - { - "number": 10, - "column": { - "name": "c_json_string_override", - "length": -1, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - } - }, - { - "number": 11, - "column": { - "name": "c_enum", - "length": 6, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - } - }, - { - "number": 12, - "column": { - "name": "c_set", - "length": 15, - "table": { - "schema": "public", - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_string_types" - } - }, - { - "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", - "name": "GetMysqlStringTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - }, - { - "name": "c_nchar", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - }, - { - "name": "c_national_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - 
"name": "mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_tinytext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - }, - { - "name": "c_mediumtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - }, - { - "name": "c_longtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" - }, - { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" - }, - { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" - }, - { - "name": "c_set", - "length": 15, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", - "name": "GetMysqlStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - 
"notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - }, - { - "name": "c_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_char" - }, - { - "name": "c_nchar", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_nchar" - }, - { - "name": "c_national_char", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "char" - }, - "originalName": "c_national_char" - }, - { - "name": "c_varchar", - "length": 100, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "varchar" - }, - "originalName": "c_varchar" - }, - { - "name": "c_tinytext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "tinytext" - }, - "originalName": "c_tinytext" - }, - { - "name": "c_mediumtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mediumtext" - }, - "originalName": "c_mediumtext" - }, - { - "name": "c_text", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" + "column": { + "name": "c_dec", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" + } }, { - "name": "c_longtext", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "longtext" - }, - "originalName": "c_longtext" + "number": 11, + "column": { + "name": "c_numeric", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_numeric" + } }, { - "name": "c_json", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" + "number": 12, + "column": { + "name": "c_fixed", 
+ "length": 10, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + } }, { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json_string_override" + "number": 13, + "column": { + "name": "c_float", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + } }, { - "name": "c_enum", - "length": 6, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_enum" - }, - "originalName": "c_enum" + "number": 14, + "column": { + "name": "c_double", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + } }, { - "name": "c_set", - "length": 15, - "table": { - "name": "mysql_string_types" - }, - "type": { - "name": "mysql_string_types_c_set" - }, - "originalName": "c_set" + "number": 15, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" + } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE mysql_string_types", - "name": "TruncateMysqlStringTypes", - "cmd": ":exec", - "filename": "query.sql" + "comments": [ + " Numeric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_numeric_types" + } }, { - "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", - "name": "InsertMysqlDatetimeTypes", - "cmd": ":exec", + "text": "INSERT INTO mysql_numeric_types \n(\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint, \n c_float, \n c_numeric, \n 
c_decimal, \n c_dec, \n c_fixed, \n c_double, \n c_double_precision\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlNumericTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_year", - "length": -1, + "name": "c_bool", + "length": 1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "tinyint" }, - "originalName": "c_year" + "originalName": "c_bool" } }, { "number": 2, "column": { - "name": "c_date", - "length": -1, + "name": "c_boolean", + "length": 1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "tinyint" }, - "originalName": "c_date" + "originalName": "c_boolean" } }, { "number": 3, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_tinyint", + "length": 3, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "tinyint" }, - "originalName": "c_datetime" + "originalName": "c_tinyint" } }, { "number": 4, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_smallint", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "smallint" }, - "originalName": "c_timestamp" + "originalName": "c_smallint" } }, { "number": 5, "column": { - "name": "c_time", + "name": "c_mediumint", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" + } + }, + { + "number": 6, + "column": { + "name": "c_int", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" + } + }, + { + "number": 7, + "column": { + "name": "c_integer", + "length": 
-1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" + } + }, + { + "number": 8, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" + } + }, + { + "number": 9, + "column": { + "name": "c_float", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + } + }, + { + "number": 10, + "column": { + "name": "c_numeric", "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "decimal" }, - "originalName": "c_time" + "originalName": "c_numeric" } - } - ], - "comments": [ - " Datetime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "mysql_datetime_types" - } - }, - { - "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", - "name": "InsertMysqlDatetimeTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 11, "column": { - "name": "c_year", - "length": -1, + "name": "c_decimal", + "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "decimal" }, - "originalName": "c_year" + "originalName": "c_decimal" } }, { - "number": 2, + "number": 12, "column": { - "name": "c_date", - "length": -1, + "name": "c_dec", + "length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_dec" } }, { - "number": 3, + "number": 13, "column": { - "name": "c_datetime", - "length": 19, + "name": "c_fixed", + 
"length": 10, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_fixed" } }, { - "number": 4, + "number": 14, "column": { - "name": "c_timestamp", - "length": 19, + "name": "c_double", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "double" }, - "originalName": "c_timestamp" + "originalName": "c_double" } }, { - "number": 5, + "number": 15, "column": { - "name": "c_time", - "length": 10, + "name": "c_double_precision", + "length": -1, "table": { "schema": "public", - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "double" }, - "originalName": "c_time" + "originalName": "c_double_precision" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" } }, { - "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", - "name": "GetMysqlDatetimeTypes", + "text": "SELECT c_bool, c_boolean, c_tinyint, c_smallint, c_mediumint, c_int, c_integer, c_bigint, c_float, c_decimal, c_dec, c_numeric, c_fixed, c_double, c_double_precision FROM mysql_numeric_types LIMIT 1", + "name": "GetMysqlNumericTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_bool", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_bool" + }, + { + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + }, + { + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" + }, + { + "name": "c_smallint", + "length": -1, + "table": 
{ + "name": "mysql_numeric_types" + }, + "type": { + "name": "smallint" + }, + "originalName": "c_smallint" + }, + { + "name": "c_mediumint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "mediumint" + }, + "originalName": "c_mediumint" + }, + { + "name": "c_int", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_int" + }, + { + "name": "c_integer", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "int" + }, + "originalName": "c_integer" + }, + { + "name": "c_bigint", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "bigint" + }, + "originalName": "c_bigint" + }, + { + "name": "c_float", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "float" + }, + "originalName": "c_float" + }, + { + "name": "c_decimal", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_decimal" + }, + { + "name": "c_dec", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_dec" + }, + { + "name": "c_numeric", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_numeric" + }, + { + "name": "c_fixed", + "length": 10, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "decimal" + }, + "originalName": "c_fixed" + }, + { + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + }, + { + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n COUNT(*) AS cnt,\n c_bool,\n 
c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nFROM mysql_numeric_types\nGROUP BY\n c_bool,\n c_boolean,\n c_tinyint,\n c_smallint,\n c_mediumint,\n c_int,\n c_integer,\n c_bigint,\n c_float,\n c_numeric,\n c_decimal,\n c_dec,\n c_fixed,\n c_double,\n c_double_precision\nLIMIT 1", + "name": "GetMysqlNumericTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_year", + "name": "cnt", + "notNull": true, "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + }, + { + "name": "c_bool", + "length": 1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "tinyint" }, - "originalName": "c_year" + "originalName": "c_bool" }, { - "name": "c_date", + "name": "c_boolean", + "length": 1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_boolean" + }, + { + "name": "c_tinyint", + "length": 3, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "tinyint" + }, + "originalName": "c_tinyint" + }, + { + "name": "c_smallint", "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "smallint" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_datetime", - "length": 19, + "name": "c_mediumint", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "mediumint" }, - "originalName": "c_datetime" + "originalName": "c_mediumint" }, { - "name": "c_timestamp", - "length": 19, + "name": "c_int", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "int" }, - "originalName": "c_timestamp" + "originalName": "c_int" }, { - "name": "c_time", - 
"length": 10, + "name": "c_integer", + "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + "name": "int" }, - "originalName": "c_time" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", - "name": "GetMysqlDatetimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_integer" + }, { - "name": "cnt", - "notNull": true, + "name": "c_bigint", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_numeric_types" + }, "type": { "name": "bigint" - } + }, + "originalName": "c_bigint" }, { - "name": "c_year", + "name": "c_float", "length": -1, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "year" + "name": "float" }, - "originalName": "c_year" + "originalName": "c_float" }, { - "name": "c_date", - "length": -1, + "name": "c_numeric", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "date" + "name": "decimal" }, - "originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_datetime", - "length": 19, + "name": "c_decimal", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "datetime" + "name": "decimal" }, - "originalName": "c_datetime" + "originalName": "c_decimal" }, { - "name": "c_timestamp", - "length": 19, + "name": "c_dec", + "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "timestamp" + "name": "decimal" }, - "originalName": "c_timestamp" + "originalName": "c_dec" }, { - "name": "c_time", + "name": "c_fixed", "length": 10, "table": { - "name": "mysql_datetime_types" + "name": "mysql_numeric_types" }, "type": { - "name": "time" + 
"name": "decimal" }, - "originalName": "c_time" + "originalName": "c_fixed" + }, + { + "name": "c_double", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double" + }, + { + "name": "c_double_precision", + "length": -1, + "table": { + "name": "mysql_numeric_types" + }, + "type": { + "name": "double" + }, + "originalName": "c_double_precision" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_datetime_types", - "name": "TruncateMysqlDatetimeTypes", + "text": "TRUNCATE TABLE mysql_numeric_types", + "name": "TruncateMysqlNumericTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlBinaryTypes", + "text": "\nINSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bit", - "length": 8, + "name": "c_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_char" + } + }, + { + "number": 2, + "column": { + "name": "c_nchar", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_nchar" + } + }, + { + "number": 3, + "column": { + "name": "c_national_char", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "char" + }, + "originalName": "c_national_char" + } + }, + { + "number": 4, + "column": { + "name": "c_varchar", + "length": 100, + "table": { + "schema": 
"public", + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + } + }, + { + "number": 5, + "column": { + "name": "c_tinytext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "tinytext" + }, + "originalName": "c_tinytext" + } + }, + { + "number": 6, + "column": { + "name": "c_mediumtext", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" } }, { - "number": 2, + "number": 7, "column": { - "name": "c_binary", - "length": 3, + "name": "c_text", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "c_text" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_varbinary", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "longtext" }, - "originalName": "c_varbinary" + "originalName": "c_longtext" } }, { - "number": 4, + "number": 9, "column": { - "name": "c_tinyblob", + "name": "c_json", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "json" }, - "originalName": "c_tinyblob" + "originalName": "c_json" } }, { - "number": 5, + "number": 10, "column": { - "name": "c_blob", + "name": "c_json_string_override", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "json" }, - "originalName": "c_blob" + "originalName": "c_json_string_override" } }, { - "number": 6, + "number": 11, "column": { - "name": 
"c_mediumblob", - "length": -1, + "name": "c_enum", + "length": 6, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_mediumblob" + "originalName": "c_enum" } }, { - "number": 7, + "number": 12, "column": { - "name": "c_longblob", - "length": -1, + "name": "c_set", + "length": 15, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "mysql_string_types_c_set" }, - "originalName": "c_longblob" + "originalName": "c_set" } } ], "comments": [ - " Binary types " + " String types " ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" } }, { - "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", - "name": "InsertMysqlBinaryTypesBatch", + "text": "INSERT INTO mysql_string_types \n(\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext, \n c_json,\n c_json_string_override,\n c_enum,\n c_set\n) \nVALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlStringTypesBatch", "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_bit", - "length": 8, + "name": "c_char", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "char" }, - "originalName": "c_bit" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_binary", - "length": 3, + "name": "c_nchar", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "char" }, - "originalName": "c_binary" + "originalName": "c_nchar" } }, { "number": 3, 
"column": { - "name": "c_varbinary", - "length": 10, + "name": "c_national_char", + "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_national_char" } }, { "number": 4, "column": { - "name": "c_tinyblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "varchar" }, - "originalName": "c_tinyblob" + "originalName": "c_varchar" } }, { "number": 5, "column": { - "name": "c_blob", + "name": "c_tinytext", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "tinytext" }, - "originalName": "c_blob" + "originalName": "c_tinytext" } }, { "number": 6, "column": { - "name": "c_mediumblob", + "name": "c_mediumtext", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mediumtext" }, - "originalName": "c_mediumblob" + "originalName": "c_mediumtext" } }, { "number": 7, "column": { - "name": "c_longblob", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "text" }, - "originalName": "c_longblob" + "originalName": "c_text" + } + }, + { + "number": 8, + "column": { + "name": "c_longtext", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + } + }, + { + "number": 9, + "column": { + "name": "c_json", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json" + } + }, + { + 
"number": 10, + "column": { + "name": "c_json_string_override", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "json" + }, + "originalName": "c_json_string_override" + } + }, + { + "number": 11, + "column": { + "name": "c_enum", + "length": 6, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_enum" + }, + "originalName": "c_enum" + } + }, + { + "number": 12, + "column": { + "name": "c_set", + "length": 15, + "table": { + "schema": "public", + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" } } ], "filename": "query.sql", "insert_into_table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" } }, { - "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", - "name": "GetMysqlBinaryTypes", + "text": "SELECT c_char, c_nchar, c_national_char, c_varchar, c_tinytext, c_mediumtext, c_text, c_longtext, c_json, c_json_string_override, c_enum, c_set FROM mysql_string_types LIMIT 1", + "name": "GetMysqlStringTypes", "cmd": ":one", "columns": [ - { - "name": "c_bit", - "length": 8, - "table": { - "name": "mysql_binary_types" - }, - "type": { - "name": "bit" - }, - "originalName": "c_bit" - }, - { - "name": "c_binary", - "length": 3, - "table": { - "name": "mysql_binary_types" - }, - "type": { - "name": "binary" - }, - "originalName": "c_binary" - }, - { - "name": "c_varbinary", - "length": 10, + { + "name": "c_char", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "char" }, - "originalName": "c_varbinary" + "originalName": "c_char" }, { - "name": "c_tinyblob", + "name": "c_nchar", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "char" }, - 
"originalName": "c_tinyblob" + "originalName": "c_nchar" }, { - "name": "c_blob", + "name": "c_national_char", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "char" }, - "originalName": "c_blob" + "originalName": "c_national_char" }, { - "name": "c_mediumblob", - "length": -1, + "name": "c_varchar", + "length": 100, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "varchar" }, - "originalName": "c_mediumblob" + "originalName": "c_varchar" }, { - "name": "c_longblob", + "name": "c_tinytext", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "tinytext" }, - "originalName": "c_longblob" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", - "name": "GetMysqlBinaryTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } + "originalName": "c_tinytext" }, { - "name": "c_bit", - "length": 8, + "name": "c_mediumtext", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "bit" + "name": "mediumtext" }, - "originalName": "c_bit" + "originalName": "c_mediumtext" }, { - "name": "c_binary", - "length": 3, + "name": "c_text", + "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "binary" + "name": "text" }, - "originalName": "c_binary" + "originalName": "c_text" }, { - "name": "c_varbinary", - "length": 10, + "name": "c_longtext", + "length": -1, "table": { - "name": "mysql_binary_types" + 
"name": "mysql_string_types" }, "type": { - "name": "varbinary" + "name": "longtext" }, - "originalName": "c_varbinary" + "originalName": "c_longtext" }, { - "name": "c_tinyblob", + "name": "c_json", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "tinyblob" + "name": "json" }, - "originalName": "c_tinyblob" + "originalName": "c_json" }, { - "name": "c_blob", + "name": "c_json_string_override", "length": -1, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "blob" + "name": "json" }, - "originalName": "c_blob" + "originalName": "c_json_string_override" }, { - "name": "c_mediumblob", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "mediumblob" + "name": "mysql_string_types_c_enum" }, - "originalName": "c_mediumblob" + "originalName": "c_enum" }, { - "name": "c_longblob", - "length": -1, + "name": "c_set", + "length": 15, "table": { - "name": "mysql_binary_types" + "name": "mysql_string_types" }, "type": { - "name": "longblob" + "name": "mysql_string_types_c_set" }, - "originalName": "c_longblob" + "originalName": "c_set" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE mysql_binary_types", - "name": "TruncateMysqlBinaryTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", - "name": "GetMysqlFunctions", + "text": "SELECT\n COUNT(*) AS cnt,\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n c_json,\n c_json_string_override,\n c_enum,\n c_set\nFROM mysql_string_types\nGROUP BY\n c_char,\n c_nchar,\n c_national_char,\n c_varchar,\n c_tinytext,\n c_mediumtext,\n c_text,\n c_longtext,\n 
c_json,\n c_json_string_override,\n c_enum,\n c_set\nLIMIT 1", + "name": "GetMysqlStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "max_int", + "name": "cnt", "notNull": true, "length": -1, "isFuncCall": true, "type": { - "name": "any" + "name": "bigint" } }, { - "name": "max_varchar", - "notNull": true, + "name": "c_char", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_string_types" + }, "type": { - "name": "any" - } + "name": "char" + }, + "originalName": "c_char" }, { - "name": "max_timestamp", - "notNull": true, + "name": "c_nchar", "length": -1, - "isFuncCall": true, + "table": { + "name": "mysql_string_types" + }, "type": { - "name": "any" - } - } - ], - "comments": [ - " Functions " - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ + "name": "char" + }, + "originalName": "c_nchar" + }, { - "name": "id", - "notNull": true, + "name": "c_national_char", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "char" }, - "originalName": "id" + "originalName": "c_national_char" }, { - "name": "name", - "notNull": true, + "name": "c_varchar", + "length": 100, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "varchar" + }, + "originalName": "c_varchar" + }, + { + "name": "c_tinytext", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "tinytext" }, - "originalName": "name" + "originalName": "c_tinytext" }, { - "name": "bio", + "name": "c_mediumtext", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" + }, + "type": { + "name": "mediumtext" + }, + "originalName": "c_mediumtext" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "mysql_string_types" }, "type": { "name": "text" }, - "originalName": "bio" - } - ], - "parameters": [ + 
"originalName": "c_text" + }, { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT ? OFFSET ?", - "name": "ListAuthors", - "cmd": ":many", - "columns": [ + "name": "c_longtext", + "length": -1, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "longtext" + }, + "originalName": "c_longtext" + }, { - "name": "id", - "notNull": true, + "name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "bigint" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": "c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", - "length": -1, + "name": "c_enum", + "length": 6, "table": { - "name": "authors" + "name": "mysql_string_types" }, "type": { - "name": "text" + "name": "mysql_string_types_c_enum" }, - "originalName": "bio" + "originalName": "c_enum" + }, + { + "name": "c_set", + "length": 15, + "table": { + "name": "mysql_string_types" + }, + "type": { + "name": "mysql_string_types_c_set" + }, + "originalName": "c_set" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_string_types", + "name": "TruncateMysqlStringTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", + "name": "InsertMysqlDatetimeTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "limit", - "notNull": true, + "name": "c_year", "length": -1, + "table": { 
+ "schema": "public", + "name": "mysql_datetime_types" + }, "type": { - "name": "integer" - } + "name": "year" + }, + "originalName": "c_year" } }, { "number": 2, "column": { - "name": "offset", - "notNull": true, + "name": "c_date", "length": -1, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, "type": { - "name": "integer" - } + "name": "date" + }, + "originalName": "c_date" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_datetime", + "length": 19, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "bigint" + "name": "datetime" }, - "originalName": "id" + "originalName": "c_datetime" } }, { - "number": 2, + "number": 4, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_timestamp", + "length": 19, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" } }, { - "number": 3, + "number": 5, "column": { - "name": "bio", - "length": -1, + "name": "c_time", + "length": 10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "time" }, - "originalName": "bio" + "originalName": "c_time" } } ], + "comments": [ + " Datetime types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "mysql_datetime_types" } }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?)", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", + "text": "INSERT INTO mysql_datetime_types \n(\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\n) \nVALUES (?, ?, ?, ?, ?)", + "name": 
"InsertMysqlDatetimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_year", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "year" }, - "originalName": "name" + "originalName": "c_year" } }, { "number": 2, "column": { - "name": "bio", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "bio" + "originalName": "c_date" + } + }, + { + "number": 3, + "column": { + "name": "c_datetime", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + } + }, + { + "number": 4, + "column": { + "name": "c_timestamp", + "length": 19, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + } + }, + { + "number": 5, + "column": { + "name": "c_time", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_datetime_types" + } + }, + { + "text": "SELECT c_year, c_date, c_datetime, c_timestamp, c_time FROM mysql_datetime_types LIMIT 1", + "name": "GetMysqlDatetimeTypes", + "cmd": ":one", + "columns": [ + { + "name": "c_year", + "length": -1, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "year" + }, + "originalName": "c_year" + }, + { + "name": "c_date", + "length": -1, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date" + }, + { + "name": "c_datetime", + "length": 19, + 
"table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "datetime" + }, + "originalName": "c_datetime" + }, + { + "name": "c_timestamp", + "length": 19, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "timestamp" + }, + "originalName": "c_timestamp" + }, + { + "name": "c_time", + "length": 10, + "table": { + "name": "mysql_datetime_types" + }, + "type": { + "name": "time" + }, + "originalName": "c_time" + } + ], + "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ? LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n COUNT(*) AS cnt,\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nFROM mysql_datetime_types\nGROUP BY\n c_year,\n c_date,\n c_datetime,\n c_timestamp,\n c_time\nLIMIT 1", + "name": "GetMysqlDatetimeTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { "name": "bigint" - }, - "originalName": "id" + } }, { - "name": "name", - "notNull": true, + "name": "c_year", "length": -1, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "year" }, - "originalName": "name" + "originalName": "c_year" }, { - "name": "bio", + "name": "c_date", "length": -1, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_date" + }, { - "name": "id", - "notNull": true, - "length": -1, + "name": "c_datetime", + "length": 
19, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "bigint" + "name": "datetime" }, - "originalName": "id" + "originalName": "c_datetime" }, { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_timestamp", + "length": 19, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "timestamp" }, - "originalName": "name" + "originalName": "c_timestamp" }, { - "name": "bio", - "length": -1, + "name": "c_time", + "length": 10, "table": { - "name": "authors" + "name": "mysql_datetime_types" }, "type": { - "name": "text" + "name": "time" }, - "originalName": "bio" + "originalName": "c_time" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE mysql_datetime_types", + "name": "TruncateMysqlDatetimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", - "length": -1, - "isNamedParam": true, + "name": "c_bit", + "length": 8, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "bit" }, - "originalName": "name" + "originalName": "c_bit" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, - "length": -1, + "name": "c_binary", + "length": 3, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "binary" }, - "originalName": "name" + "originalName": "c_binary" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors", - 
"name": "DeleteAllAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", - "length": -1, - "isNamedParam": true, + "name": "c_varbinary", + "length": 10, "table": { "schema": "public", - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "varbinary" }, - "originalName": "bio" + "originalName": "c_varbinary" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" + "number": 4, + "column": { + "name": "c_tinyblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "tinyblob" + }, + "originalName": "c_tinyblob" + } }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 5, + "column": { + "name": "c_blob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "blob" + }, + "originalName": "c_blob" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ + "number": 6, + "column": { + "name": "c_mediumblob", + "length": -1, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "mediumblob" + }, + "originalName": "c_mediumblob" + } + }, { - "number": 1, + "number": 7, "column": { - "name": "ids", - "notNull": true, + "name": "c_longblob", "length": -1, - "isNamedParam": true, "table": { - 
"name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "longblob" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_longblob" } } ], - "filename": "query.sql" + "comments": [ + " Binary types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "mysql_binary_types" + } }, { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ + "text": "INSERT INTO mysql_binary_types \n(\n c_bit,\n c_binary, \n c_varbinary, \n c_tinyblob, \n c_blob, \n c_mediumblob, \n c_longblob\n) \nVALUES (?, ?, ?, ?, ?, ?, ?)", + "name": "InsertMysqlBinaryTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigint" - }, - "originalName": "id" + "number": 1, + "column": { + "name": "c_bit", + "length": 8, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" + } }, { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 2, + "column": { + "name": "c_binary", + "length": 3, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ + "number": 3, + "column": { + "name": "c_varbinary", + "length": 10, + "table": { + "schema": "public", + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + } + }, { - "number": 1, + "number": 4, "column": { - "name": "ids", - "notNull": true, + "name": "c_tinyblob", 
"length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "tinyblob" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_tinyblob" } }, { - "number": 2, + "number": 5, "column": { - "name": "names", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "blob" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES (?, ?)", - "name": "CreateBook", - "cmd": ":execlastid", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_mediumblob", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "mediumblob" }, - "originalName": "name" + "originalName": "c_mediumblob" } }, { - "number": 2, + "number": 7, "column": { - "name": "author_id", - "notNull": true, + "name": "c_longblob", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "longblob" }, - "originalName": "author_id" + "originalName": "c_longblob" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "mysql_binary_types" } }, { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description \nFROM authors JOIN books ON authors.id = books.author_id \nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_bit, c_binary, c_varbinary, c_tinyblob, c_blob, c_mediumblob, c_longblob FROM mysql_binary_types LIMIT 1", + "name": "GetMysqlBinaryTypes", + "cmd": ":one", "columns": [ { - "name": 
"authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio\nFROM authors authors1 JOIN authors authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description\nFROM authors JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ + "name": "c_varbinary", + "length": 10, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "varbinary" + }, + "originalName": "c_varbinary" + }, { - "name": "id", - "notNull": true, + "name": "c_tinyblob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "bigint" + "name": "tinyblob" }, - "originalName": "id" + "originalName": "c_tinyblob" }, { - "name": "name", - "notNull": true, + "name": "c_blob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "blob" }, - "originalName": "name" + "originalName": "c_blob" }, { - "name": "bio", + "name": 
"c_mediumblob", "length": -1, "table": { - "name": "authors" + "name": "mysql_binary_types" }, "type": { - "name": "text" + "name": "mediumblob" }, - "originalName": "bio" + "originalName": "c_mediumblob" }, { - "name": "books", + "name": "c_longblob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "INSERT INTO extended.bios (author_name, name, bio_type, author_type) VALUES (?, ?, ?, ?)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + "text": "SELECT\n COUNT(*) AS cnt,\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nFROM mysql_binary_types\nGROUP BY\n c_bit,\n c_binary,\n c_varbinary,\n c_tinyblob,\n c_blob,\n c_mediumblob,\n c_longblob\nLIMIT 1", + "name": "GetMysqlBinaryTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "author_name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "author_name" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" } }, { - "number": 2, - "column": { - "name": "name", - "length": 100, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "varchar" - }, - "originalName": "name" - } + "name": "c_bit", + "length": 8, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "bit" + }, + "originalName": "c_bit" }, { - "number": 3, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_bio_type" - 
}, - "originalName": "bio_type" - } + "name": "c_binary", + "length": 3, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "binary" + }, + "originalName": "c_binary" }, { - "number": 4, - "column": { - "name": "author_type", - "length": 24, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "bios_author_type" - }, - "originalName": "author_type" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "schema": "extended", - "name": "bios" - } - }, - { - "text": "SELECT author_name, name, bio_type, author_type FROM extended.bios WHERE bio_type = ? LIMIT 1", - "name": "GetFirstExtendedBioByType", - "cmd": ":one", - "columns": [ - { - "name": "author_name", - "length": 100, + "name": "c_varbinary", + "length": 10, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "varchar" + "name": "varbinary" }, - "originalName": "author_name" + "originalName": "c_varbinary" }, { - "name": "name", - "length": 100, + "name": "c_tinyblob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "varchar" + "name": "tinyblob" }, - "originalName": "name" + "originalName": "c_tinyblob" }, { - "name": "bio_type", - "length": 13, + "name": "c_blob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "bios_bio_type" + "name": "blob" }, - "originalName": "bio_type" + "originalName": "c_blob" }, { - "name": "author_type", - "length": 24, + "name": "c_mediumblob", + "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "mysql_binary_types" }, "type": { - "name": "bios_author_type" + "name": "mediumblob" }, - "originalName": "author_type" - } - ], - "parameters": [ + "originalName": "c_mediumblob" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": 13, - "table": { - "schema": "extended", - "name": "bios" 
- }, - "type": { - "name": "bios_bio_type" - }, - "originalName": "bio_type" - } + "name": "c_longblob", + "length": -1, + "table": { + "name": "mysql_binary_types" + }, + "type": { + "name": "longblob" + }, + "originalName": "c_longblob" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", + "text": "TRUNCATE TABLE mysql_binary_types", + "name": "TruncateMysqlBinaryTypes", "cmd": ":exec", "filename": "query.sql" + }, + { + "text": "\nSELECT\n MAX(c_int) AS max_int,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM mysql_numeric_types\nCROSS JOIN mysql_string_types\nCROSS JOIN mysql_datetime_types", + "name": "GetMysqlFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_int", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + } + ], + "comments": [ + " Functions " + ], + "filename": "query.sql" } ], "sqlc_version": "v1.27.0", diff --git a/examples/MySqlConnectorLegacyExample/request.message b/examples/MySqlConnectorLegacyExample/request.message index 347877f2596ff2be102aa29e4dd82d73c8ec24c1..41fbe903aa0a4a75b9549c6adc03060839354d0a 100644 GIT binary patch delta 129 zcmZ2+l5xdJMt-gztXzynLae!!#f3Ri8WR)6CeGvFDNfEv%}vw;38_s?l$of_#iXPT zmJ^&9ufB18GSlV_+(wL>1)1k@Om^Z;VbSCMYSXwwX3o;usvMA|uNlq?w6P@hf#xgmH*Jbi!p5)2P ic<;0Q=jPhW#5LJheeUL$id7DqTlfwNZB9;dWdZ=y5-R`z diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index fa7de08a..0cf808d6 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -9,6 +9,19 @@ using System.Xml; namespace NpgsqlDapperExampleGen; +public class Author +{ + public required long Id { get; 
init; } + public required string Name { get; init; } + public string? Bio { get; init; } +}; +public class Book +{ + public required Guid Id { get; init; } + public required string Name { get; init; } + public required long AuthorId { get; init; } + public string? Description { get; init; } +}; public class PostgresType { public bool? CBoolean { get; init; } @@ -76,19 +89,6 @@ public class PostgresGeometricType public NpgsqlPolygon? CPolygon { get; init; } public NpgsqlCircle? CCircle { get; init; } }; -public class Author -{ - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } -}; -public class Book -{ - public required Guid Id { get; init; } - public required string Name { get; init; } - public required long AuthorId { get; init; } - public string? Description { get; init; } -}; public class ExtendedBio { public required string AuthorName { get; init; } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 70322a46..7535cc3d 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -43,589 +43,602 @@ public static QuerySql WithTransaction(NpgsqlTransaction transaction) private NpgsqlTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; - public class InsertPostgresTypesArgs + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; + public class GetAuthorRow { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? 
CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public Guid? CUuid { get; init; } - public CEnum? CEnum { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) + public class GetAuthorArgs + { + public required string Name { get; init; } + }; + public async Task GetAuthor(GetAuthorArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : null); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public class ListAuthorsRow { - public bool? CBoolean { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public Guid? CUuid { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } }; - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CUuid); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow + public class ListAuthorsArgs { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public Guid? CUuid { get; init; } - public CEnum? 
CEnum { get; init; } + public required int Offset { get; init; } + public required int Limit { get; init; } }; - public async Task GetPostgresTypes() + public async Task> ListAuthors(ListAuthorsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("offset", args.Offset); + queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; + var result = await connection.QueryAsync(ListAuthorsSql, queryParams); + return result.AsList(); } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow { - public short? CSmallint { get; init; } - public bool? CBoolean { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } - public Guid? 
CUuid { get; init; } - public required long Cnt { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task GetPostgresTypesCnt() + public class CreateAuthorArgs + { + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; - public class GetPostgresFunctionsRow + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class CreateAuthorReturnIdRow { - public int? MaxInteger { get; init; } - public string? 
MaxVarchar { get; init; } - public required DateTime MaxTimestamp { get; init; } + public required long Id { get; init; } }; - public async Task GetPostgresFunctions() + public class CreateAuthorReturnIdArgs { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); - } - - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + public required string Name { get; init; } + public string? Bio { get; init; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresTypesSql); - return; + return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, 
c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; - public class InsertPostgresStringTypesArgs + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow { - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + public class GetAuthorByIdArgs + { + public required long Id { get; init; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); + queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, 
c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresStringTypesBatchArgs + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public class GetAuthorByNamePatternRow { - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task InsertPostgresStringTypesBatch(List args) + public class GetAuthorByNamePatternArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public string? NamePattern { get; init; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name_pattern", args.NamePattern); + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await writer.WriteAsync(row.CText); - } - - await writer.CompleteAsync(); + var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); + return result.AsList(); } - - await connection.CloseAsync(); } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await 
this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; - public class GetPostgresStringTypesRow + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs { - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } + public required string Name { get; init; } }; - public async Task GetPostgresStringTypes() + public async Task DeleteAuthor(DeleteAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); - return result; - } + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; - public async Task TruncatePostgresStringTypes() + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await 
connection.ExecuteAsync(TruncatePostgresStringTypesSql); + await connection.ExecuteAsync(TruncateAuthorsSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; - public class GetPostgresStringTypesCntRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public class UpdateAuthorsArgs { - public string? CChar { get; init; } - public string? CVarchar { get; init; } - public string? CCharacterVarying { get; init; } - public string? CBpchar { get; init; } - public string? CText { get; init; } - public required long Cnt { get; init; } + public string? 
Bio { get; init; } }; - public async Task GetPostgresStringTypesCnt() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); - return result; - } + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; - public class GetPostgresStringTypesTextSearchRow + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; + public class GetAuthorsByIdsRow { - public string? CText { get; init; } - public required NpgsqlTsQuery Query { get; init; } - public required NpgsqlTsVector Tsv { get; init; } - public required float Rnk { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? 
Bio { get; init; } }; - public class GetPostgresStringTypesTextSearchArgs + public class GetAuthorsByIdsArgs { - public required string ToTsquery { get; init; } + public required long[] LongArr1 { get; init; } }; - public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { var queryParams = new Dictionary(); - queryParams.Add("to_tsquery", args.ToTsquery); + queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); - return result; + var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); + return result.AsList(); } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; - public class InsertPostgresDateTimeTypesArgs + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; + public class GetAuthorsByIdsAndNamesRow { - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? 
CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } }; - public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + public class GetAuthorsByIdsAndNamesArgs + { + public required long[] LongArr1 { get; init; } + public required string[] StringArr2 { get; init; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); + queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); - return; + { + var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); + return result.AsList(); + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; - public class GetPostgresDateTimeTypesRow + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING 
id"; + public class CreateBookRow { - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } + public required Guid Id { get; init; } }; - public async Task GetPostgresDateTimeTypes() + public class CreateBookArgs + { + public required string Name { get; init; } + public required long AuthorId { get; init; } + }; + public async Task CreateBook(CreateBookArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); - return result; - } + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; - public async Task TruncatePostgresDateTimeTypes() + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow + { + public required Author? Author { get; init; } + public required Book? 
Book { get; init; } + }; + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); - return; + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } + } } - private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; - public class GetPostgresDateTimeTypesCntRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } - public required long Cnt { get; init; } + public required Author? Author { get; init; } + public required Author? Author2 { get; init; } }; - public async Task GetPostgresDateTimeTypesCnt() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); - return result; + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } } - private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresDateTimeTypesBatchArgs + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public DateTime? CDate { get; init; } - public TimeSpan? CTime { get; init; } - public DateTime? CTimestamp { get; init; } - public DateTime? CTimestampWithTz { get; init; } - public TimeSpan? CInterval { get; init; } + public required long Id { get; init; } + public required string Name { get; init; } + public string? Bio { get; init; } + public required Book? 
Book { get; init; } }; - public async Task InsertPostgresDateTimeTypesBatch(List args) + public class GetAuthorsByBookNameArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public required string Name { get; init; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + { + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - foreach (var row in args) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; - public class InsertPostgresNetworkTypesArgs + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs { - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } + public required string AuthorName { get; init; } + public required string Name { get; init; } + public ExtendedBioType? 
BioType { get; init; } }; - public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + public async Task CreateExtendedBio(CreateExtendedBioArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; - public class GetPostgresNetworkTypesRow + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow { - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public string? CMacaddr8 { get; init; } + public required string AuthorName { get; init; } + public required string Name { get; init; } + public ExtendedBioType? 
BioType { get; init; } }; - public async Task GetPostgresNetworkTypes() + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; init; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; - public async Task TruncatePostgresNetworkTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + await connection.ExecuteAsync(TruncateExtendedBiosSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: 
this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; - public class GetPostgresNetworkTypesCntRow + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + public class InsertPostgresTypesArgs { - public NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } - public required long Cnt { get; init; } + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public Guid? CUuid { get; init; } + public CEnum? 
CEnum { get; init; } }; - public async Task GetPostgresNetworkTypesCnt() + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", args.CMoney); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); - return result; - } + await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresNetworkTypesBatchArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public 
NpgsqlCidr? CCidr { get; init; } - public IPAddress? CInet { get; init; } - public PhysicalAddress? CMacaddr { get; init; } + public bool? CBoolean { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public Guid? CUuid { get; init; } }; - public async Task InsertPostgresNetworkTypesBatch(List args) + public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + await writer.WriteAsync(row.CUuid); } await writer.CompleteAsync(); @@ -635,154 +648,160 @@ public async Task InsertPostgresNetworkTypesBatch(List GetPostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? 
args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public class GetPostgresUnstructuredTypesRow + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + public class GetPostgresTypesCntRow { - public JsonElement? CJson { get; init; } - public string? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public string? CXmlStringOverride { get; init; } + public short? CSmallint { get; init; } + public bool? CBoolean { get; init; } + public int? 
CInteger { get; init; } + public long? CBigint { get; init; } + public float? CReal { get; init; } + public decimal? CNumeric { get; init; } + public decimal? CDecimal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public Guid? CUuid { get; init; } + public required long Cnt { get; init; } }; - public async Task GetPostgresUnstructuredTypes() + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + public class GetPostgresFunctionsRow + { + public int? MaxInteger { get; init; } + public string? 
MaxVarchar { get; init; } + public required DateTime MaxTimestamp { get; init; } + }; + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; - public class InsertPostgresArrayTypesArgs - { - public byte[]? CBytea { get; init; } - public bool[]? CBooleanArray { get; init; } - public string[]? CTextArray { get; init; } - public int[]? CIntegerArray { get; init; } - public decimal[]? CDecimalArray { get; init; } - public DateTime[]? CDateArray { get; init; } - public DateTime[]? 
CTimestampArray { get; init; } - }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_bytea", args.CBytea); - queryParams.Add("c_boolean_array", args.CBooleanArray); - queryParams.Add("c_text_array", args.CTextArray); - queryParams.Add("c_integer_array", args.CIntegerArray); - queryParams.Add("c_decimal_array", args.CDecimalArray); - queryParams.Add("c_date_array", args.CDateArray); - queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs { - public byte[]? CBytea { get; init; } - public bool[]? CBooleanArray { get; init; } - public string[]? 
CTextArray { get; init; } - public int[]? CIntegerArray { get; init; } - public decimal[]? CDecimalArray { get; init; } - public DateTime[]? CDateArray { get; init; } - public DateTime[]? CTimestampArray { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } }; - public async Task GetPostgresArrayTypes() + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); - return result; - } + await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs { - public 
byte[]? CBytea { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? CText { get; init; } }; - public async Task InsertPostgresArrayTypesBatch(List args) + public async Task InsertPostgresStringTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); } await writer.CompleteAsync(); @@ -792,671 +811,652 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() + public async Task GetPostgresStringTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - 
public async Task TruncatePostgresArrayTypes() + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + await connection.ExecuteAsync(TruncatePostgresStringTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; - public class InsertPostgresGeoTypesArgs + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public string? CChar { get; init; } + public string? CVarchar { get; init; } + public string? CCharacterVarying { get; init; } + public string? CBpchar { get; init; } + public string? 
CText { get; init; } + public required long Cnt { get; init; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public async Task GetPostgresStringTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow { - public NpgsqlPoint? 
CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? CCircle { get; init; } + public string? CText { get; init; } + public required NpgsqlTsQuery Query { get; init; } + public required NpgsqlTsVector Tsv { get; init; } + public required float Rnk { get; init; } }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + public class GetPostgresStringTypesTextSearchArgs { - public NpgsqlPoint? CPoint { get; init; } - public NpgsqlLine? CLine { get; init; } - public NpgsqlLSeg? CLseg { get; init; } - public NpgsqlBox? CBox { get; init; } - public NpgsqlPath? CPath { get; init; } - public NpgsqlPolygon? CPolygon { get; init; } - public NpgsqlCircle? 
CCircle { get; init; } + public required string ToTsquery { get; init; } }; - public async Task GetPostgresGeoTypes() + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("to_tsquery", args.ToTsquery); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs + { + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } + }; + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorArgs + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow { - public required string Name { get; init; } + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task GetPostgresDateTimeTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public class ListAuthorsRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? 
Bio { get; init; } - }; - public class ListAuthorsArgs - { - public required int Offset { get; init; } - public required int Limit { get; init; } - }; - public async Task> ListAuthors(ListAuthorsArgs args) + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() { - var queryParams = new Dictionary(); - queryParams.Add("offset", args.Offset); - queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(ListAuthorsSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class CreateAuthorArgs + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } + public DateTime? 
CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? CInterval { get; init; } + public required long Cnt { get; init; } }; - public async Task CreateAuthor(CreateAuthorArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); + public async Task GetPostgresDateTimeTypesCnt() + { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs { - public required long Id { get; init; } + public DateTime? CDate { get; init; } + public TimeSpan? CTime { get; init; } + public DateTime? CTimestamp { get; init; } + public DateTime? CTimestampWithTz { get; init; } + public TimeSpan? 
CInterval { get; init; } }; - public class CreateAuthorReturnIdArgs + public async Task InsertPostgresDateTimeTypesBatch(List args) { - public required string Name { get; init; } - public string? Bio { get; init; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs + { + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? 
CMacaddr8 { get; init; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); + await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorByIdArgs + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow { - public required long Id { get; init; } + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? CMacaddr { get; init; } + public string? 
CMacaddr8 { get; init; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresNetworkTypes() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public class GetAuthorByNamePatternRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - }; - public class GetAuthorByNamePatternArgs - { - public string? 
NamePattern { get; init; } - }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: this.Transaction); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow { - public required string Name { get; init; } + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? 
CMacaddr { get; init; } + public required long Cnt { get; init; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task GetPostgresNetworkTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs { - if (this.Transaction == null) + public NpgsqlCidr? CCidr { get; init; } + public IPAddress? CInet { get; init; } + public PhysicalAddress? 
CMacaddr { get; init; } + }; + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateAuthorsSql); - return; - } + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; - public class UpdateAuthorsArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; + public class InsertPostgresUnstructuredTypesArgs { - public string? Bio { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? 
CXmlStringOverride { get; init; } }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; - public class GetAuthorsByIdsRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? 
Bio { get; init; } - }; - public class GetAuthorsByIdsArgs + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; + public class GetPostgresUnstructuredTypesRow { - public required long[] LongArr1 { get; init; } + public JsonElement? CJson { get; init; } + public string? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public string? CXmlStringOverride { get; init; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetPostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); - return result.AsList(); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; - public class GetAuthorsByIdsAndNamesRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? 
Bio { get; init; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public required long[] LongArr1 { get; init; } - public required string[] StringArr2 { get; init; } - }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); - queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public required Guid Id { get; init; } - }; - public class CreateBookArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; + public class InsertPostgresArrayTypesArgs { - public required string Name { get; init; } - public required long AuthorId { 
get; init; } + public byte[]? CBytea { get; init; } + public bool[]? CBooleanArray { get; init; } + public string[]? CTextArray { get; init; } + public int[]? CIntegerArray { get; init; } + public decimal[]? CDecimalArray { get; init; } + public DateTime[]? CDateArray { get; init; } + public DateTime[]? CTimestampArray { get; init; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); + queryParams.Add("c_bytea", args.CBytea); + queryParams.Add("c_boolean_array", args.CBooleanArray); + queryParams.Add("c_text_array", args.CTextArray); + queryParams.Add("c_integer_array", args.CIntegerArray); + queryParams.Add("c_decimal_array", args.CDecimalArray); + queryParams.Add("c_date_array", args.CDateArray); + queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateBookSql, queryParams); + await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + private const string 
GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public required Author? Author { get; init; } - public required Book? Book { get; init; } + public byte[]? CBytea { get; init; } + public bool[]? CBooleanArray { get; init; } + public string[]? CTextArray { get; init; } + public int[]? CIntegerArray { get; init; } + public decimal[]? CDecimalArray { get; init; } + public DateTime[]? CDateArray { get; init; } + public DateTime[]? CTimestampArray { get; init; } }; - public async Task> ListAllAuthorsBooks() + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs { - public required Author? Author { get; init; } - public required Author? Author2 { get; init; } + public byte[]? 
CBytea { get; init; } }; - public async Task> GetDuplicateAuthors() + public async Task InsertPostgresArrayTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + foreach (var row in args) { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; - } + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea); } - } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public required long Id { get; init; } - public required string Name { get; init; } - public string? Bio { get; init; } - public required Book? Book { get; init; } - }; - public class GetAuthorsByBookNameArgs + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea, COUNT(*) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1"; + public class GetPostgresArrayTypesCntRow { - public required string Name { get; init; } + public byte[]? CBytea { get; init; } + public required long Cnt { get; init; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + } + + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() + { + if (this.Transaction == null) { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + return; } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; - public class CreateExtendedBioArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; + public class InsertPostgresGeoTypesArgs { - public required string AuthorName { get; init; } - public required string Name { get; init; } - public ExtendedBioType? BioType { get; init; } + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? CCircle { get; init; } }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("author_name", args.AuthorName); - queryParams.Add("name", args.Name); - queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + queryParams.Add("c_path", args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs { - public required string AuthorName { get; init; } - public required string Name { get; init; } - public ExtendedBioType? BioType { get; init; } + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? 
CCircle { get; init; } }; - public class GetFirstExtendedBioByTypeArgs + public async Task InsertPostgresGeoTypesBatch(List args) { - public ExtendedBioType? BioType { get; init; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; init; } + public NpgsqlLine? CLine { get; init; } + public NpgsqlLSeg? CLseg { get; init; } + public NpgsqlBox? CBox { get; init; } + public NpgsqlPath? CPath { get; init; } + public NpgsqlPolygon? CPolygon { get; init; } + public NpgsqlCircle? CCircle { get; init; } }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + public async Task GetPostgresGeoTypes() { - var queryParams = new Dictionary(); - queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateExtendedBiosSql); + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } } \ No newline at end of file diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index fe14429b..6f839469 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ 
-3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/types/schema.sql", - "examples/config/postgresql/authors/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/types/query.sql", - "examples/config/postgresql/authors/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlDapperExample", @@ -25,6 +25,96 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "postgres_types" @@ -565,96 +655,6 @@ } } ] - }, - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - 
"name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] } ], "enums": [ @@ -32575,1303 +32575,1521 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": 
"authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 4, + "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int4" + "name": "text" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_bigint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "offset", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" + 
"name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_numeric", + "name": "limit", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" + "name": "integer" + } } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 9, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "id", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "bigserial" }, - "originalName": "c_double_precision" + "originalName": "id" } }, { - "number": 10, + "number": 2, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } }, { - "number": 11, + "number": 3, "column": { - "name": 
"c_uuid", + "name": "bio", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" - } - }, - { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "originalName": "bio" } } ], - "comments": [ - " Basic types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "authors" } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.bool" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } }, { "number": 2, "column": { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int2" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "bio" } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, + } + ], + "filename": "query.sql", + 
"insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_numeric", + "name": "id", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.numeric" + "name": "bigserial" }, - "originalName": "c_numeric" + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + 
"notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 9, + "number": 1, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 10, + "number": 1, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" + "originalName": "bio" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types 
\nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { - "name": "c_boolean", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bool" + "name": "bigserial" }, - "originalName": "c_boolean" + "originalName": "id" }, { - "name": "c_bit", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bit" + "name": "text" }, - "originalName": "c_bit" + "originalName": "name" }, { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int2" + "name": "text" }, - "originalName": "c_smallint" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_integer", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int4" - }, - "originalName": "c_integer" - }, + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "name": "c_bigint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bigserial" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": 
"pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_numeric", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "numeric" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "float8" - }, - "originalName": "c_double_precision" - }, + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "name": "c_money", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "books" }, "type": { - "name": "money" + "name": "uuid" }, - "originalName": "c_money" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_uuid", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": 
"query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_enum", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" + "type": {}, + "embedTable": { + "name": "books" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "c_smallint", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int2" - }, - "originalName": "c_smallint" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, + "type": {}, + "embedTable": { + "name": "authors" + } + 
} + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int4" + "name": "bigserial" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_bigint", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "text" }, - "originalName": "c_bigint" + "originalName": "name" }, { - "name": "c_real", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "bio" }, { - "name": "c_numeric", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_numeric" + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "author_name" + } }, { - 
"name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { "schema": "pg_catalog", - "name": "float8" + "name": "varchar" }, - "originalName": "c_double_precision" + "originalName": "author_name" }, { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "money" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_money" + "originalName": "name" }, { - "name": "c_uuid", + "name": "bio_type", "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "uuid" + "schema": "extended", + "name": "bio_type" }, - "originalName": "c_uuid" - }, + "originalName": "bio_type" + } + ], + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + 
"schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + 
"number": 4, + "column": { + "name": "c_integer", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_real", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_decimal", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" + } + }, + { + "number": 11, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 12, + "column": { + 
"name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.bool" }, - "originalName": "c_char" + "originalName": "c_boolean" } }, { "number": 2, "column": { - "name": "c_varchar", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int2" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 3, "column": { - "name": "c_character_varying", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int4" }, - "originalName": "c_character_varying" + "originalName": "c_integer" } }, { "number": 4, "column": { - "name": "c_bpchar", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": 
"postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "pg_catalog.int8" }, - "originalName": "c_bpchar" + "originalName": "c_bigint" } }, { "number": 5, "column": { - "name": "c_text", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "c_text" + "originalName": "c_real" } - } - ], - "comments": [ - " String types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_string_types" - } - }, - { - "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "c_char", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_char" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 7, "column": { - "name": "c_varchar", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_varchar" + "originalName": "c_decimal" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_character_varying", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.float8" }, - "originalName": "c_character_varying" + "originalName": "c_double_precision" } }, { - "number": 4, + "number": 9, "column": { - "name": "c_bpchar", + "name": "c_money", "length": 
-1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "money" }, - "originalName": "c_bpchar" + "originalName": "c_money" } }, { - "number": 5, + "number": 10, "column": { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" }, - "originalName": "c_text" + "originalName": "c_uuid" } } ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_string_types" + "name": "postgres_types" } }, { - "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", - "name": "GetPostgresStringTypes", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", "cmd": ":one", "columns": [ { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "bool" }, - "originalName": "c_char" + "originalName": "c_boolean" }, { - "name": "c_varchar", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "bit" }, - "originalName": "c_varchar" + "originalName": "c_bit" }, { - "name": "c_character_varying", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "int2" }, - "originalName": "c_character_varying" + "originalName": "c_smallint" }, { - "name": "c_bpchar", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, 
"type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "int4" }, - "originalName": "c_bpchar" + "originalName": "c_integer" }, { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_string_types", - "name": "TruncatePostgresStringTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", - "name": "GetPostgresStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_char", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "int8" }, - "originalName": "c_char" + "originalName": "c_bigint" }, { - "name": "c_varchar", + "name": "c_real", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "float4" }, - "originalName": "c_varchar" + "originalName": "c_real" }, { - "name": "c_character_varying", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "numeric" }, - "originalName": "c_character_varying" + "originalName": "c_numeric" }, { - "name": "c_bpchar", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_bpchar" + "originalName": "c_decimal" }, { - "name": "c_text", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_string_types" + "name": 
"postgres_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "float8" }, - "originalName": "c_text" + "originalName": "c_double_precision" }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, - { - "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", - "name": "GetPostgresStringTypesTextSearch", - "cmd": ":one", - "columns": [ - { - "name": "c_text", + "name": "c_money", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "money" }, - "originalName": "c_text" + "originalName": "c_money" }, { - "name": "query", - "notNull": true, + "name": "c_uuid", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsquery" + "name": "uuid" }, - "originalName": "query" + "originalName": "c_uuid" }, { - "name": "tsv", - "notNull": true, + "name": "c_enum", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsvector" + "name": "c_enum" }, - "originalName": "tsv" - }, - { - "name": "rnk", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "real" - } - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "to_tsquery", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } - } + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypes", - "cmd": ":exec", - "parameters": [ - { - 
"number": 1, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 2, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 3, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 4, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 5, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - } - ], - "comments": [ - " DateTime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_datetime_types" - } - }, - { - "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", - "name": "GetPostgresDateTimeTypes", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_date", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": 
"postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "int2" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_time", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "bool" }, - "originalName": "c_time" + "originalName": "c_boolean" }, { - "name": "c_timestamp", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "int4" }, - "originalName": "c_timestamp" + "originalName": "c_integer" }, { - "name": "c_timestamp_with_tz", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "int8" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bigint" }, { - "name": "c_interval", + "name": "c_real", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "float4" }, - "originalName": "c_interval" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_datetime_types", - "name": "TruncatePostgresDateTimeTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", - "name": "GetPostgresDateTimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_real" + }, { - "name": "c_date", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "numeric" }, - 
"originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_time", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "numeric" }, - "originalName": "c_time" + "originalName": "c_decimal" }, { - "name": "c_timestamp", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "float8" }, - "originalName": "c_timestamp" + "originalName": "c_double_precision" }, { - "name": "c_timestamp_with_tz", + "name": "c_money", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "timestamptz" + "name": "money" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_money" }, { - "name": "c_interval", + "name": "c_uuid", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "interval" + "name": "uuid" }, - "originalName": "c_interval" + "originalName": "c_uuid" }, { "name": "cnt", @@ -33879,262 +34097,362 @@ "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": 
true, + "type": { + "name": "anyarray" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_date", + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "date" + "name": "pg_catalog.bpchar" }, - "originalName": "c_date" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_time", + "name": "c_varchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.time" + "name": "pg_catalog.varchar" }, - "originalName": "c_time" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_timestamp", + "name": "c_character_varying", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.varchar" }, - "originalName": "c_timestamp" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_timestamp_with_tz", + "name": "c_bpchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamptz" + "name": "bpchar" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bpchar" } }, { "number": 5, 
"column": { - "name": "c_interval", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.interval" + "name": "text" }, - "originalName": "c_interval" + "originalName": "c_text" } } ], + "comments": [ + " String types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_datetime_types" + "name": "postgres_string_types" } }, { - "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", - "name": "InsertPostgresNetworkTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_cidr", + "name": "c_char", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "pg_catalog.bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_inet", + "name": "c_varchar", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "pg_catalog.varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "name": "pg_catalog.varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_macaddr8", + "name": 
"c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, "type": { - "name": "macaddr8" - } + "name": "text" + }, + "originalName": "c_text" } } ], - "comments": [ - " Network types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_network_types" + "name": "postgres_string_types" } }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", - "name": "GetPostgresNetworkTypes", + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" }, { - "name": "c_macaddr8", - "notNull": true, + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", 
"length": -1, + "table": { + "name": "postgres_string_types" + }, "type": { "name": "text" - } + }, + "originalName": "c_text" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_network_types", - "name": "TruncatePostgresNetworkTypes", + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", - "name": "GetPostgresNetworkTypesCnt", + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + 
"name": "text" + }, + "originalName": "c_text" }, { "name": "cnt", @@ -34149,488 +34467,295 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", - "name": "InsertPostgresNetworkTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 2, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 3, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_network_types" - } - }, - { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 2, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 3, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 4, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 5, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 6, - 
"column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - } - ], - "comments": [ - " Unstructured types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_unstructured_types" - } - }, - { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", "cmd": ":one", "columns": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", + "name": "c_text", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "json" + "name": "text" }, - "originalName": "c_json_string_override" + "originalName": "c_text" }, { - "name": "c_jsonb", + "name": "query", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonb" + "name": "tsquery" }, - "originalName": "c_jsonb" + "originalName": "query" }, { - "name": "c_jsonpath", + "name": "tsv", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonpath" + "name": "tsvector" }, - "originalName": "c_jsonpath" + "originalName": "tsv" }, { - "name": "c_xml", + "name": "rnk", + "notNull": true, "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, + "isFuncCall": true, "type": { - "name": 
"xml" - }, - "originalName": "c_xml" - }, + "name": "real" + } + } + ], + "parameters": [ { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresArrayTypes", + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.bool" + "name": "pg_catalog.time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "name": 
"pg_catalog.timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" } }, { "number": 4, - "column": { - "name": "c_integer_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer_array", - "arrayDims": 1 - } - }, - { - "number": 5, - "column": { - "name": "c_decimal_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal_array", - "arrayDims": 1 - } - }, - { - "number": 6, - "column": { - "name": "c_date_array", - "isArray": true, + "column": { + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "date" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" } }, { - "number": 7, + "number": 5, "column": { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.interval" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 + "originalName": "c_interval" } } ], "comments": [ - " Array types " + " DateTime types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" } }, { - "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", - "name": "GetPostgresArrayTypes", + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": 
"GetPostgresDateTimeTypes", "cmd": ":one", "columns": [ { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" }, { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "bool" + "name": "time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" }, { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" }, { - "name": "c_integer_array", - "isArray": true, + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "int4" + "name": "timestamptz" }, - "originalName": "c_integer_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" }, { - "name": "c_decimal_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "interval" }, - "originalName": "c_decimal_array", - "arrayDims": 1 - }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n 
c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_date_array", - "isArray": true, + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "name": "date" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_date" }, { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", "name": "timestamp" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", - "name": "InsertPostgresArrayTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + "originalName": "c_timestamp" + }, { - "number": 1, - "column": { - "name": "c_bytea", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "bytea" - }, - "originalName": "c_bytea" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_array_types" - } - }, - { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", - "name": "GetPostgresArrayTypesCnt", - "cmd": ":one", - "columns": [ + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, { - "name": "c_bytea", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": 
"bytea" + "schema": "pg_catalog", + "name": "interval" }, - "originalName": "c_bytea" + "originalName": "c_interval" }, { "name": "cnt", @@ -34645,1218 +34770,1093 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_array_types", - "name": "TruncatePostgresArrayTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "point" + "name": "date" }, - "originalName": "c_point" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "line" + "name": "pg_catalog.time" }, - "originalName": "c_line" + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "lseg" + "name": "pg_catalog.timestamp" }, - "originalName": "c_lseg" + "originalName": "c_timestamp" } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "box" + "name": "pg_catalog.timestamptz" }, - 
"originalName": "c_box" + "originalName": "c_timestamp_with_tz" } }, { "number": 5, "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "circle" + "name": "pg_catalog.interval" }, - "originalName": "c_circle" + "originalName": "c_interval" } } ], - "comments": [ - " Geometric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" } }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_cidr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "point" + "name": "cidr" }, - "originalName": "c_point" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_inet", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, 
"type": { - "name": "line" + "name": "inet" }, - "originalName": "c_line" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_macaddr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "lseg" + "name": "macaddr" }, - "originalName": "c_lseg" + "originalName": "c_macaddr" } }, { "number": 4, "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_macaddr8", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "macaddr8" + } } } ], + "comments": [ + " Network types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_network_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - 
"originalName": "c_line" - }, - { - "name": "c_lseg", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - }, - { - "name": "c_box", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - }, - { - "name": "c_path", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - }, - { - "name": "c_polygon", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - }, - { - "name": "c_circle", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", + 
"text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } + "originalName": "c_macaddr" }, { - "number": 2, - "column": { - "name": "limit", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - 
"name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_network_types" } }, { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - 
"cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "json" + } } }, { "number": 2, "column": { - "name": "bio", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "json" + } + } + }, + { + "number": 3, + "column": { + "name": "c_jsonb", + "length": -1, + "type": { + "name": "jsonb" + } + } + }, + { + "number": 4, + "column": { + "name": "c_jsonpath", + "length": -1, + "type": { + "name": "jsonpath" + } + } + }, + { + "number": 5, + "column": { + "name": "c_xml", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 6, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + 
"name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": "c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", + "name": "c_jsonb", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "jsonb" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_jsonb" + }, { - "name": "id", - "notNull": true, + "name": "c_jsonpath", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "jsonpath" }, - "originalName": "id" + "originalName": "c_jsonpath" }, { - "name": "name", - "notNull": true, + "name": "c_xml", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "name" + "originalName": "c_xml" }, { - "name": "bio", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "bio" + "originalName": "c_xml_string_override" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_unstructured_types", + 
"name": "TruncatePostgresUnstructuredTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresArrayTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "c_bytea", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "bytea" }, - "originalName": "name" + "originalName": "c_bytea" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean_array", + "arrayDims": 1 } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", + "name": "c_text_array", + "isArray": true, "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_array_types" }, "type": { "name": "text" }, - "originalName": "bio" + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": 
"postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_date_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date_array", + "arrayDims": 1 + } + }, + { + "number": 7, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 } } ], - "filename": "query.sql" + "comments": [ + " Array types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", + "name": "GetPostgresArrayTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "bytea" + }, + "originalName": "c_bytea" + }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": 
"postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "id" + "originalName": "c_decimal_array", + "arrayDims": 1 }, { - "name": "name", - "notNull": true, + "name": "c_date_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "name" + "originalName": "c_date_array", + "arrayDims": 1 }, { - "name": "bio", + "name": "c_timestamp_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "bio" + "originalName": "c_timestamp_array", + "arrayDims": 1 } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "name": "InsertPostgresArrayTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_bytea", "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bytea" }, - "arrayDims": 1 + "originalName": "c_bytea" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": 
"GetAuthorsByIdsAndNames", - "cmd": ":many", + "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "name": "GetPostgresArrayTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "name": "bytea" }, - "originalName": "id" + "originalName": "c_bytea" }, { - "name": "name", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_array_types", + "name": "TruncatePostgresArrayTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_point", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "point" }, - "arrayDims": 1 + "originalName": "c_point" } }, { "number": 2, "column": { - "notNull": true, - "isArray": true, + "name": "c_line", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "name": "text" + "name": "line" }, - "arrayDims": 1 + "originalName": "c_line" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - 
"name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "name", - "notNull": true, + "name": "c_lseg", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "lseg" }, - "originalName": "name" + "originalName": "c_lseg" } }, { - "number": 2, + "number": 4, "column": { - "name": "author_id", - "notNull": true, + "name": "c_box", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "box" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_box" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": 
"authors" + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } } ], - "filename": "query.sql" + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": 
"postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "name", - "notNull": true, + "name": "c_box", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "box" }, - "originalName": "name" + "originalName": "c_box" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 5, "column": { - "name": "author_name", - "notNull": true, + "name": "c_path", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "path" }, - "originalName": "author_name" + "originalName": "c_path" } }, { - "number": 2, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_polygon", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "polygon" }, - "originalName": "name" + "originalName": "c_polygon" } }, { - "number": 3, + "number": 7, "column": { - "name": "bio_type", + "name": "c_circle", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "extended.bio_type" + "name": "circle" }, - "originalName": "bio_type" + "originalName": "c_circle" } } ], 
"filename": "query.sql", "insert_into_table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" } }, { - "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", - "name": "GetFirstExtendedBioByType", + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", "cmd": ":one", "columns": [ { - "name": "author_name", - "notNull": true, + "name": "c_point", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "point" }, - "originalName": "author_name" + "originalName": "c_point" }, { - "name": "name", - "notNull": true, + "name": "c_line", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "line" }, - "originalName": "name" + "originalName": "c_line" }, { - "name": "bio_type", + "name": "c_lseg", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "extended", - "name": "bio_type" + "name": "lseg" }, - "originalName": "bio_type" - } - ], - "parameters": [ + "originalName": "c_lseg" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": -1, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "extended.bio_type" - }, - "originalName": "bio_type" - } + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": 
{ + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 48d5109e..eb1415fe 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -1,9 +1,19 @@ д 2 -postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb▄ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunner■ы public"└public▄ +./dist/LocalRunner■ы public"└publicГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R 
authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -82,17 +92,7 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtext" +c_circle0         Rpostgres_geometric_typesbcircle" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10221,7 +10221,110 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╧ +bio_type Autobiography BiographyMemoirР +9SELECT id, name, bio FROM authors +WHERE name = $1 LIMIT 1 GetAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*0, +name0         R authorsbtextzname: query.sql║ +CSELECT id, name, bio +FROM authors +ORDER BY name +LIMIT $2 +OFFSET $1 ListAuthors:many"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*&" +offset0         8b integer*%! 
+limit0         8b integer: query.sqlн +OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*95 +id0         Rpublicauthorsb  bigserialzid*84 +name0         Rpublicauthorsbtextzname*40 +bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ + GetAuthor(GetAuthorArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : null); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public class ListAuthorsRow { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? 
CUuid { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); - await writer.WriteAsync(row.CUuid); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow + public class ListAuthorsArgs { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } - public CEnum? 
CEnum { get; set; } + public int Offset { get; set; } + public int Limit { get; set; } }; - public async Task GetPostgresTypes() + public async Task> ListAuthors(ListAuthorsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("offset", args.Offset); + queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; + var result = await connection.QueryAsync(ListAuthorsSql, queryParams); + return result.AsList(); } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? 
CUuid { get; set; } - public long Cnt { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresTypesCnt() + public class CreateAuthorArgs + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("id", args.Id); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; - public class GetPostgresFunctionsRow + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class CreateAuthorReturnIdRow { - public int? 
MaxInteger { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public long Id { get; set; } }; - public async Task GetPostgresFunctions() + public class CreateAuthorReturnIdArgs { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); - } - - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresTypesSql); - return; + return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, 
c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; - public class InsertPostgresStringTypesArgs + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_char", args.CChar); - queryParams.Add("c_varchar", args.CVarchar); - queryParams.Add("c_character_varying", args.CCharacterVarying); - queryParams.Add("c_bpchar", args.CBpchar); - queryParams.Add("c_text", args.CText); + queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, 
c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresStringTypesBatchArgs + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public class GetAuthorByNamePatternRow { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresStringTypesBatch(List args) + public class GetAuthorByNamePatternArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("name_pattern", args.NamePattern); + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CChar); - await writer.WriteAsync(row.CVarchar); - await writer.WriteAsync(row.CCharacterVarying); - await writer.WriteAsync(row.CBpchar); - await writer.WriteAsync(row.CText); - } - - await writer.CompleteAsync(); + var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); + return result.AsList(); } - - await connection.CloseAsync(); } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: 
this.Transaction)).AsList(); } - private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; - public class GetPostgresStringTypesRow + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } + public string Name { get; set; } }; - public async Task GetPostgresStringTypes() + public async Task DeleteAuthor(DeleteAuthorArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); - return result; - } + await connection.ExecuteAsync(DeleteAuthorSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; - public async Task TruncatePostgresStringTypes() + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresStringTypesSql); + await connection.ExecuteAsync(TruncateAuthorsSql); return; } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); } - private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; - public class GetPostgresStringTypesCntRow + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public class UpdateAuthorsArgs { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public long Cnt { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresStringTypesCnt() + public async Task UpdateAuthors(UpdateAuthorsArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio", args.Bio); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); - return result; - } + return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, 
transaction: this.Transaction); } - private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; - public class GetPostgresStringTypesTextSearchRow + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; + public class GetAuthorsByIdsRow { - public string CText { get; set; } - public NpgsqlTsQuery Query { get; set; } - public NpgsqlTsVector Tsv { get; set; } - public float Rnk { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public class GetPostgresStringTypesTextSearchArgs + public class GetAuthorsByIdsArgs { - public string ToTsquery { get; set; } + public long[] LongArr1 { get; set; } }; - public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { var queryParams = new Dictionary(); - queryParams.Add("to_tsquery", args.ToTsquery); + queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); - return result; + var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); + return result.AsList(); } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, 
queryParams, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; - public class InsertPostgresDateTimeTypesArgs + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; + public class GetAuthorsByIdsAndNamesRow { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + public class GetAuthorsByIdsAndNamesArgs + { + public long[] LongArr1 { get; set; } + public string[] StringArr2 { get; set; } + }; + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_date", args.CDate); - queryParams.Add("c_time", args.CTime); - queryParams.Add("c_timestamp", args.CTimestamp); - queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); - queryParams.Add("c_interval", args.CInterval); + queryParams.Add("longArr_1", args.LongArr1); + queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); - return; + { + var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); + return result.AsList(); + } } if 
(this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); + return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); } - private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; - public class GetPostgresDateTimeTypesRow + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? 
CInterval { get; set; } + public Guid Id { get; set; } }; - public async Task GetPostgresDateTimeTypes() + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("name", args.Name); + queryParams.Add("author_id", args.AuthorId); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); - return result; - } + return await connection.QuerySingleAsync(CreateBookSql, queryParams); } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; - public async Task TruncatePostgresDateTimeTypes() + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow + { + public Author Author { get; set; } + public Book Book { get; set; } + }; + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); - return; + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = 
connection.CreateCommand(ListAllAuthorsBooksSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } + } } - private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; - public class GetPostgresDateTimeTypesCntRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public long Cnt { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetPostgresDateTimeTypesCnt() + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { - using (var connection = new NpgsqlConnection(ConnectionString)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); - return result; + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } + } } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } } - private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresDateTimeTypesBatchArgs + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? 
CInterval { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } }; - public async Task InsertPostgresDateTimeTypesBatch(List args) + public class GetAuthorsByBookNameArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + { + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp); - await writer.WriteAsync(row.CTimestampWithTz); - await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; - public class InsertPostgresNetworkTypesArgs + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? 
BioType { get; set; } }; - public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + public async Task CreateExtendedBio(CreateExtendedBioArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_cidr", args.CCidr); - queryParams.Add("c_inet", args.CInet); - queryParams.Add("c_macaddr", args.CMacaddr); - queryParams.Add("c_macaddr8", args.CMacaddr8); + queryParams.Add("author_name", args.AuthorName); + queryParams.Add("name", args.Name); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; - public class GetPostgresNetworkTypesRow + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? 
BioType { get; set; } }; - public async Task GetPostgresNetworkTypes() + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("bio_type", args.BioType != null ? args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; - public async Task TruncatePostgresNetworkTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + await connection.ExecuteAsync(TruncateExtendedBiosSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: 
this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; - public class GetPostgresNetworkTypesCntRow + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + public class InsertPostgresTypesArgs { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public long Cnt { get; set; } + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? CUuid { get; set; } + public CEnum? 
CEnum { get; set; } }; - public async Task GetPostgresNetworkTypesCnt() + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", args.CMoney); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); - return result; - } + await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresNetworkTypesBatchArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public 
NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } + public bool? CBoolean { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? CUuid { get; set; } }; - public async Task InsertPostgresNetworkTypesBatch(List args) + public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CCidr); - await writer.WriteAsync(row.CInet); - await writer.WriteAsync(row.CMacaddr); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + await writer.WriteAsync(row.CUuid); } await writer.CompleteAsync(); @@ -636,154 +649,160 @@ public async Task InsertPostgresNetworkTypesBatch(List GetPostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); - queryParams.Add("c_json_string_override", args.CJsonStringOverride); - queryParams.Add("c_jsonb", args.CJsonb.HasValue ? 
args.CJsonb.Value.GetRawText() : null); - queryParams.Add("c_jsonpath", args.CJsonpath); - queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); - queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public class GetPostgresUnstructuredTypesRow + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + public class GetPostgresTypesCntRow { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public short? CSmallint { get; set; } + public bool? CBoolean { get; set; } + public int? CInteger { get; set; } + public long? 
CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? CUuid { get; set; } + public long Cnt { get; set; } }; - public async Task GetPostgresUnstructuredTypes() + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + public class GetPostgresFunctionsRow + { + public int? 
MaxInteger { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } + }; + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; - public class InsertPostgresArrayTypesArgs - { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } - }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - var queryParams = new Dictionary(); - queryParams.Add("c_bytea", args.CBytea); - queryParams.Add("c_boolean_array", args.CBooleanArray); - queryParams.Add("c_text_array", args.CTextArray); - 
queryParams.Add("c_integer_array", args.CIntegerArray); - queryParams.Add("c_decimal_array", args.CDecimalArray); - queryParams.Add("c_date_array", args.CDateArray); - queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + await connection.ExecuteAsync(TruncatePostgresTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } }; - public async Task GetPostgresArrayTypes() + public async Task 
InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_char", args.CChar); + queryParams.Add("c_varchar", args.CVarchar); + queryParams.Add("c_character_varying", args.CCharacterVarying); + queryParams.Add("c_bpchar", args.CBpchar); + queryParams.Add("c_text", args.CText); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); - return result; - } + await connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresStringTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs { - public byte[] CBytea { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } }; - public async Task InsertPostgresArrayTypesBatch(List args) + public async Task InsertPostgresStringTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await 
connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea); + await writer.WriteAsync(row.CChar); + await writer.WriteAsync(row.CVarchar); + await writer.WriteAsync(row.CCharacterVarying); + await writer.WriteAsync(row.CBpchar); + await writer.WriteAsync(row.CText); } await writer.CompleteAsync(); @@ -793,672 +812,653 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() + public async Task GetPostgresStringTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + await connection.ExecuteAsync(TruncatePostgresStringTypesSql); return; } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresStringTypesSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; - public class InsertPostgresGeoTypesArgs + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? 
CCircle { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public long Cnt { get; set; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public async Task GetPostgresStringTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("c_point", args.CPoint); - queryParams.Add("c_line", args.CLine); - queryParams.Add("c_lseg", args.CLseg); - queryParams.Add("c_box", args.CBox); - queryParams.Add("c_path", args.CPath); - queryParams.Add("c_polygon", args.CPolygon); - queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesCntSql, transaction: this.Transaction); } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, 
txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public string CText { get; set; } + public NpgsqlTsQuery Query { get; set; } + public NpgsqlTsVector Tsv { get; set; } + public float Rnk { get; set; } }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint); - await writer.WriteAsync(row.CLine); - await writer.WriteAsync(row.CLseg); - await writer.WriteAsync(row.CBox); - await writer.WriteAsync(row.CPath); - await writer.WriteAsync(row.CPolygon); - await writer.WriteAsync(row.CCircle); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + public class GetPostgresStringTypesTextSearchArgs { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? 
CCircle { get; set; } + public string ToTsquery { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("to_tsquery", args.ToTsquery); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresStringTypesTextSearchSql, queryParams, transaction: this.Transaction); } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) { + var queryParams = new Dictionary(); + queryParams.Add("c_date", args.CDate); + queryParams.Add("c_time", args.CTime); + queryParams.Add("c_timestamp", args.CTimestamp); + queryParams.Add("c_timestamp_with_tz", args.CTimestampWithTz); + queryParams.Add("c_interval", args.CInterval); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); + await connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresDateTimeTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorArgs + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow { - public string Name { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task GetPostgresDateTimeTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesSql, transaction: this.Transaction); } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs - { - public int Offset { get; set; } - public int Limit { get; set; } - }; - public async Task> ListAuthors(ListAuthorsArgs args) + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() { - var queryParams = new Dictionary(); - queryParams.Add("offset", args.Offset); - queryParams.Add("limit", args.Limit); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(ListAuthorsSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql); + return; } if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(ListAuthorsSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresDateTimeTypesSql, transaction: this.Transaction); } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class CreateAuthorArgs + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + public long Cnt { get; set; } }; - public async Task CreateAuthor(CreateAuthorArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); + public async Task GetPostgresDateTimeTypesCnt() + { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(CreateAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresDateTimeTypesCntSql, transaction: this.Transaction); } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs { - public long Id { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } }; - public class CreateAuthorReturnIdArgs + public async Task InsertPostgresDateTimeTypesBatch(List args) { - public string Name { get; set; } - public string Bio { get; set; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp); + await writer.WriteAsync(row.CTimestampWithTz); + await writer.WriteAsync(row.CInterval, NpgsqlDbType.Interval); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs + { + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("bio", args.Bio); + queryParams.Add("c_cidr", args.CCidr); + queryParams.Add("c_inet", args.CInet); + queryParams.Add("c_macaddr", args.CMacaddr); + queryParams.Add("c_macaddr8", args.CMacaddr8); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams); + await connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateAuthorReturnIdSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresNetworkTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow { - public long Id { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresNetworkTypes() { - var queryParams = new Dictionary(); - queryParams.Add("id", args.Id); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetAuthorByIdSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesSql, transaction: this.Transaction); } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs - { - public string NamePattern { get; set; } - }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() { - var queryParams = new Dictionary(); - queryParams.Add("name_pattern", args.NamePattern); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(GetAuthorByNamePatternSql, queryParams); - return result.AsList(); - } 
+ await connection.ExecuteAsync(TruncatePostgresNetworkTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorByNamePatternSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresNetworkTypesSql, transaction: this.Transaction); } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow { - public string Name { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task GetPostgresNetworkTypesCnt() { - var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAuthorSql, queryParams); - return; + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql); + return result; + } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAuthorSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNetworkTypesCntSql, transaction: this.Transaction); } 
- private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs { - if (this.Transaction == null) + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + }; + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateAuthorsSql); - return; - } + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr); + await writer.WriteAsync(row.CInet); + await writer.WriteAsync(row.CMacaddr); + } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateAuthorsSql, transaction: this.Transaction); + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; - public class UpdateAuthorsArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; + public class InsertPostgresUnstructuredTypesArgs { - public string Bio { get; 
set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("bio", args.Bio); + queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); + queryParams.Add("c_json_string_override", args.CJsonStringOverride); + queryParams.Add("c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : null); + queryParams.Add("c_jsonpath", args.CJsonpath); + queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); + queryParams.Add("c_xml_string_override", args.CXmlStringOverride); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.ExecuteAsync(UpdateAuthorsSql, queryParams); + await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.ExecuteAsync(UpdateAuthorsSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string 
GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; + public class GetPostgresUnstructuredTypesRow { - public long[] LongArr1 { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task GetPostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryAsync(GetAuthorsByIdsSql, queryParams); - return result.AsList(); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsSql, queryParams, transaction: this.Transaction)).AsList(); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs - { - public long[] LongArr1 { get; set; } - public string[] StringArr2 { get; set; } - }; - public async Task> 
GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { - var queryParams = new Dictionary(); - queryParams.Add("longArr_1", args.LongArr1); - queryParams.Add("stringArr_2", args.StringArr2); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams); - return result.AsList(); - } + await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return (await this.Transaction.Connection.QueryAsync(GetAuthorsByIdsAndNamesSql, queryParams, transaction: this.Transaction)).AsList(); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public Guid Id { get; set; } - }; - public class CreateBookArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; + public class InsertPostgresArrayTypesArgs { - public string Name { get; set; } - public long AuthorId { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + 
public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("name", args.Name); - queryParams.Add("author_id", args.AuthorId); + queryParams.Add("c_bytea", args.CBytea); + queryParams.Add("c_boolean_array", args.CBooleanArray); + queryParams.Add("c_text_array", args.CTextArray); + queryParams.Add("c_integer_array", args.CIntegerArray); + queryParams.Add("c_decimal_array", args.CDecimalArray); + queryParams.Add("c_date_array", args.CDateArray); + queryParams.Add("c_timestamp_array", args.CTimestampArray); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - return await connection.QuerySingleAsync(CreateBookSql, queryParams); + await connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams); + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QuerySingleAsync(CreateBookSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresArrayTypesSql, queryParams, transaction: this.Transaction); } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public Author Author { get; set; } - 
public Book Book { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs { - public Author Author { get; set; } - public Author Author2 { get; set; } + public byte[] CBytea { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task InsertPostgresArrayTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + foreach (var row in args) { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); - return result; - } + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea); } - } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetDuplicateAuthorsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea, COUNT(*) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1"; + public class GetPostgresArrayTypesCntRow { - public string Name { get; set; } + public byte[] CBytea { get; set; } + public long Cnt { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { - using (var connection = 
NpgsqlDataSource.Create(ConnectionString)) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) - { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } - } + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql); + return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesCntSql, transaction: this.Transaction); + } + + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() + { + if (this.Transaction == null) { - command.CommandText = GetAuthorsByBookNameSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresArrayTypesSql); + return; } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresArrayTypesSql, transaction: this.Transaction); } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; - public class CreateExtendedBioArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; + public class InsertPostgresGeoTypesArgs { - public string AuthorName { get; set; } - public string Name { get; set; } - public ExtendedBioType? BioType { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("author_name", args.AuthorName); - queryParams.Add("name", args.Name); - queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); + queryParams.Add("c_point", args.CPoint); + queryParams.Add("c_line", args.CLine); + queryParams.Add("c_lseg", args.CLseg); + queryParams.Add("c_box", args.CBox); + queryParams.Add("c_path", args.CPath); + queryParams.Add("c_polygon", args.CPolygon); + queryParams.Add("c_circle", args.CCircle); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(CreateExtendedBioSql, queryParams); + await connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(CreateExtendedBioSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresGeoTypesSql, queryParams, transaction: this.Transaction); } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs { - public string AuthorName { get; set; } - public string Name { get; set; } - public ExtendedBioType? BioType { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } }; - public class GetFirstExtendedBioByTypeArgs + public async Task InsertPostgresGeoTypesBatch(List args) { - public ExtendedBioType? BioType { get; set; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint); + await writer.WriteAsync(row.CLine); + await writer.WriteAsync(row.CLseg); + await writer.WriteAsync(row.CBox); + await writer.WriteAsync(row.CPath); + await writer.WriteAsync(row.CPolygon); + await writer.WriteAsync(row.CCircle); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + public async Task GetPostgresGeoTypes() { - var queryParams = new Dictionary(); - queryParams.Add("bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetFirstExtendedBioByTypeSql, queryParams, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresGeoTypesSql, transaction: this.Transaction); } - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncateExtendedBiosSql); + await connection.ExecuteAsync(TruncatePostgresGeoTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresGeoTypesSql, transaction: this.Transaction); } } } \ No newline at end of file diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 2fe3fd91..957cd318 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ 
b/examples/NpgsqlDapperLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/types/schema.sql", - "examples/config/postgresql/authors/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/types/query.sql", - "examples/config/postgresql/authors/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlDapperLegacyExample", @@ -25,6 +25,96 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "postgres_types" @@ -565,96 +655,6 @@ } } ] - }, - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": 
-1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] } ], "enums": [ @@ -32575,1303 +32575,1521 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } + "name": "name", + 
"notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 4, + "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int4" + "name": "text" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_bigint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "offset", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": 
"pg_catalog.float4" - }, - "originalName": "c_real" + "name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_numeric", + "name": "limit", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" + "name": "integer" + } } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 9, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "id", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "bigserial" }, - "originalName": "c_double_precision" + "originalName": "id" } }, { - "number": 10, + "number": 2, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } }, { - 
"number": 11, + "number": 3, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" - } - }, - { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "originalName": "bio" } } ], - "comments": [ - " Basic types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "authors" } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.bool" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } }, { "number": 2, "column": { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int2" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "bio" } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" 
- } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_numeric", + "name": "id", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.numeric" + "name": "bigserial" }, - "originalName": "c_numeric" + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - 
"originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 9, + "number": 1, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 10, + "number": 1, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" + "originalName": "bio" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n 
c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { - "name": "c_boolean", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bool" + "name": "bigserial" }, - "originalName": "c_boolean" + "originalName": "id" }, { - "name": "c_bit", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bit" + "name": "text" }, - "originalName": "c_bit" + "originalName": "name" }, { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int2" + "name": "text" }, - "originalName": "c_smallint" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_integer", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int4" - }, - "originalName": "c_integer" - }, + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "name": "c_bigint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bigserial" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + 
"name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_numeric", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "numeric" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "float8" - }, - "originalName": "c_double_precision" - }, + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "name": "c_money", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "books" }, "type": { - "name": "money" + "name": "uuid" }, - "originalName": "c_money" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_uuid", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": 
"author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_enum", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" + "type": {}, + "embedTable": { + "name": "books" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "c_smallint", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int2" - }, - "originalName": "c_smallint" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, + "type": {}, + 
"embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int4" + "name": "bigserial" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_bigint", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "text" }, - "originalName": "c_bigint" + "originalName": "name" }, { - "name": "c_real", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "bio" }, { - "name": "c_numeric", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_numeric" + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + 
"originalName": "author_name" + } }, { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { "schema": "pg_catalog", - "name": "float8" + "name": "varchar" }, - "originalName": "c_double_precision" + "originalName": "author_name" }, { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "money" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_money" + "originalName": "name" }, { - "name": "c_uuid", + "name": "bio_type", "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "uuid" + "schema": "extended", + "name": "bio_type" }, - "originalName": "c_uuid" - }, + "originalName": "bio_type" + } + ], + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": 
"bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + 
"originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_real", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_decimal", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" + } + }, + { + "number": 11, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } 
+ }, + { + "number": 12, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.bool" }, - "originalName": "c_char" + "originalName": "c_boolean" } }, { "number": 2, "column": { - "name": "c_varchar", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int2" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 3, "column": { - "name": "c_character_varying", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int4" }, - "originalName": "c_character_varying" + "originalName": "c_integer" } }, { "number": 4, "column": { - "name": "c_bpchar", + "name": "c_bigint", "length": -1, 
"table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "pg_catalog.int8" }, - "originalName": "c_bpchar" + "originalName": "c_bigint" } }, { "number": 5, "column": { - "name": "c_text", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "c_text" + "originalName": "c_real" } - } - ], - "comments": [ - " String types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_string_types" - } - }, - { - "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "c_char", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_char" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 7, "column": { - "name": "c_varchar", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_varchar" + "originalName": "c_decimal" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_character_varying", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.float8" }, - "originalName": "c_character_varying" + "originalName": "c_double_precision" } }, { - "number": 4, + "number": 9, "column": { - "name": 
"c_bpchar", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "money" }, - "originalName": "c_bpchar" + "originalName": "c_money" } }, { - "number": 5, + "number": 10, "column": { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" }, - "originalName": "c_text" + "originalName": "c_uuid" } } ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_string_types" + "name": "postgres_types" } }, { - "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", - "name": "GetPostgresStringTypes", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", "cmd": ":one", "columns": [ { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "bool" }, - "originalName": "c_char" + "originalName": "c_boolean" }, { - "name": "c_varchar", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "bit" }, - "originalName": "c_varchar" + "originalName": "c_bit" }, { - "name": "c_character_varying", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "int2" }, - "originalName": "c_character_varying" + "originalName": "c_smallint" }, { - "name": "c_bpchar", + "name": "c_integer", "length": -1, "table": { - "name": 
"postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "int4" }, - "originalName": "c_bpchar" + "originalName": "c_integer" }, { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_string_types", - "name": "TruncatePostgresStringTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", - "name": "GetPostgresStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_char", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "int8" }, - "originalName": "c_char" + "originalName": "c_bigint" }, { - "name": "c_varchar", + "name": "c_real", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "float4" }, - "originalName": "c_varchar" + "originalName": "c_real" }, { - "name": "c_character_varying", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "numeric" }, - "originalName": "c_character_varying" + "originalName": "c_numeric" }, { - "name": "c_bpchar", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_bpchar" + "originalName": "c_decimal" }, { - "name": "c_text", + "name": "c_double_precision", "length": -1, 
"table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "float8" }, - "originalName": "c_text" + "originalName": "c_double_precision" }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, - { - "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", - "name": "GetPostgresStringTypesTextSearch", - "cmd": ":one", - "columns": [ - { - "name": "c_text", + "name": "c_money", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "money" }, - "originalName": "c_text" + "originalName": "c_money" }, { - "name": "query", - "notNull": true, + "name": "c_uuid", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsquery" + "name": "uuid" }, - "originalName": "query" + "originalName": "c_uuid" }, { - "name": "tsv", - "notNull": true, + "name": "c_enum", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsvector" + "name": "c_enum" }, - "originalName": "tsv" - }, - { - "name": "rnk", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "real" - } - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "to_tsquery", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } - } + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": 
"InsertPostgresDateTimeTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 2, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 3, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 4, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 5, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - } - ], - "comments": [ - " DateTime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_datetime_types" - } - }, - { - "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", - "name": "GetPostgresDateTimeTypes", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_date", + "name": "c_smallint", 
"length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "int2" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_time", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "bool" }, - "originalName": "c_time" + "originalName": "c_boolean" }, { - "name": "c_timestamp", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "int4" }, - "originalName": "c_timestamp" + "originalName": "c_integer" }, { - "name": "c_timestamp_with_tz", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "int8" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bigint" }, { - "name": "c_interval", + "name": "c_real", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "float4" }, - "originalName": "c_interval" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_datetime_types", - "name": "TruncatePostgresDateTimeTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", - "name": "GetPostgresDateTimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_real" + }, { - "name": "c_date", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": 
"date" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_time", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "numeric" }, - "originalName": "c_time" + "originalName": "c_decimal" }, { - "name": "c_timestamp", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "float8" }, - "originalName": "c_timestamp" + "originalName": "c_double_precision" }, { - "name": "c_timestamp_with_tz", + "name": "c_money", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "timestamptz" + "name": "money" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_money" }, { - "name": "c_interval", + "name": "c_uuid", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "interval" + "name": "uuid" }, - "originalName": "c_interval" + "originalName": "c_uuid" }, { "name": "cnt", @@ -33879,262 +34097,362 @@ "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": 
"max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_date", + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "date" + "name": "pg_catalog.bpchar" }, - "originalName": "c_date" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_time", + "name": "c_varchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.time" + "name": "pg_catalog.varchar" }, - "originalName": "c_time" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_timestamp", + "name": "c_character_varying", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.varchar" }, - "originalName": "c_timestamp" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_timestamp_with_tz", + "name": "c_bpchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamptz" + "name": "bpchar" }, - "originalName": 
"c_timestamp_with_tz" + "originalName": "c_bpchar" } }, { "number": 5, "column": { - "name": "c_interval", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.interval" + "name": "text" }, - "originalName": "c_interval" + "originalName": "c_text" } } ], + "comments": [ + " String types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_datetime_types" + "name": "postgres_string_types" } }, { - "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", - "name": "InsertPostgresNetworkTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_cidr", + "name": "c_char", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "pg_catalog.bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_inet", + "name": "c_varchar", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "pg_catalog.varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "name": "pg_catalog.varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" } }, 
{ "number": 4, "column": { - "name": "c_macaddr8", + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, "type": { - "name": "macaddr8" - } + "name": "text" + }, + "originalName": "c_text" } } ], - "comments": [ - " Network types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_network_types" + "name": "postgres_string_types" } }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", - "name": "GetPostgresNetworkTypes", + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" }, { - "name": "c_macaddr8", - "notNull": true, + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + 
"originalName": "c_bpchar" + }, + { + "name": "c_text", "length": -1, + "table": { + "name": "postgres_string_types" + }, "type": { "name": "text" - } + }, + "originalName": "c_text" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_network_types", - "name": "TruncatePostgresNetworkTypes", + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", - "name": "GetPostgresNetworkTypesCnt", + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { 
+ "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" }, { "name": "cnt", @@ -34149,488 +34467,295 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", - "name": "InsertPostgresNetworkTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 2, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 3, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_network_types" - } - }, - { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 2, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 3, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 4, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 5, - "column": { - "name": "c_xml", - "length": -1, - "type": { - 
"name": "xml" - } - } - }, - { - "number": 6, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - } - ], - "comments": [ - " Unstructured types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_unstructured_types" - } - }, - { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", "cmd": ":one", "columns": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", + "name": "c_text", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "json" + "name": "text" }, - "originalName": "c_json_string_override" + "originalName": "c_text" }, { - "name": "c_jsonb", + "name": "query", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonb" + "name": "tsquery" }, - "originalName": "c_jsonb" + "originalName": "query" }, { - "name": "c_jsonpath", + "name": "tsv", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonpath" + "name": "tsvector" }, - "originalName": "c_jsonpath" + "originalName": "tsv" }, { - "name": "c_xml", + "name": "rnk", + "notNull": true, "length": -1, - "table": { - "name": 
"postgres_unstructured_types" - }, + "isFuncCall": true, "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, + "name": "real" + } + } + ], + "parameters": [ { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresArrayTypes", + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.bool" + "name": "pg_catalog.time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": 
"postgres_datetime_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" } }, { "number": 4, - "column": { - "name": "c_integer_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer_array", - "arrayDims": 1 - } - }, - { - "number": 5, - "column": { - "name": "c_decimal_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal_array", - "arrayDims": 1 - } - }, - { - "number": 6, - "column": { - "name": "c_date_array", - "isArray": true, + "column": { + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "date" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" } }, { - "number": 7, + "number": 5, "column": { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.interval" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 + "originalName": "c_interval" } } ], "comments": [ - " Array types " + " DateTime types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" } }, { - "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", - "name": "GetPostgresArrayTypes", + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval 
FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", "cmd": ":one", "columns": [ { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" }, { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "bool" + "name": "time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" }, { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" }, { - "name": "c_integer_array", - "isArray": true, + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "int4" + "name": "timestamptz" }, - "originalName": "c_integer_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" }, { - "name": "c_decimal_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "interval" }, - "originalName": "c_decimal_array", - "arrayDims": 1 - }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS 
cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_date_array", - "isArray": true, + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "name": "date" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_date" }, { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", "name": "timestamp" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", - "name": "InsertPostgresArrayTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + "originalName": "c_timestamp" + }, { - "number": 1, - "column": { - "name": "c_bytea", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "bytea" - }, - "originalName": "c_bytea" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_array_types" - } - }, - { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", - "name": "GetPostgresArrayTypesCnt", - "cmd": ":one", - "columns": [ + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, { - "name": "c_bytea", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": 
"postgres_datetime_types" }, "type": { - "name": "bytea" + "schema": "pg_catalog", + "name": "interval" }, - "originalName": "c_bytea" + "originalName": "c_interval" }, { "name": "cnt", @@ -34645,1218 +34770,1093 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_array_types", - "name": "TruncatePostgresArrayTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "point" + "name": "date" }, - "originalName": "c_point" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "line" + "name": "pg_catalog.time" }, - "originalName": "c_line" + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "lseg" + "name": "pg_catalog.timestamp" }, - "originalName": "c_lseg" + "originalName": "c_timestamp" } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": 
"box" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_box" + "originalName": "c_timestamp_with_tz" } }, { "number": 5, "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "circle" + "name": "pg_catalog.interval" }, - "originalName": "c_circle" + "originalName": "c_interval" } } ], - "comments": [ - " Geometric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" } }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_cidr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "point" + "name": "cidr" }, - "originalName": "c_point" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_inet", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": 
"postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "line" + "name": "inet" }, - "originalName": "c_line" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_macaddr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "lseg" + "name": "macaddr" }, - "originalName": "c_lseg" + "originalName": "c_macaddr" } }, { "number": 4, "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_macaddr8", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "macaddr8" + } } } ], + "comments": [ + " Network types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_network_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", - "length": -1, - "table": { - "name": 
"postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - }, - { - "name": "c_lseg", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - }, - { - "name": "c_box", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - }, - { - "name": "c_path", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - }, - { - "name": "c_polygon", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - }, - { - "name": "c_circle", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY 
name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } + "originalName": "c_macaddr" }, { - "number": 2, - "column": { - "name": "limit", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - 
"name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_network_types" } }, { - "text": "INSERT INTO authors (name, bio) 
VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "json" + } } }, { "number": 2, "column": { - "name": "bio", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "json" + } + } + }, + { + "number": 3, + "column": { + "name": "c_jsonb", + "length": -1, + "type": { + "name": "jsonb" + } + } + }, + { + "number": 4, + "column": { + "name": "c_jsonpath", + "length": -1, + "type": { + "name": "jsonpath" + } + } + }, + { + "number": 5, + "column": { + "name": "c_xml", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 6, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", 
"cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": "c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", + "name": "c_jsonb", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "jsonb" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_jsonb" + }, { - "name": "id", - "notNull": true, + "name": "c_jsonpath", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "jsonpath" }, - "originalName": "id" + "originalName": "c_jsonpath" }, { - "name": "name", - "notNull": true, + "name": "c_xml", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "name" + "originalName": "c_xml" }, { - "name": "bio", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "bio" + "originalName": "c_xml_string_override" } ], + "filename": "query.sql" + }, 
+ { + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresArrayTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "c_bytea", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "bytea" }, - "originalName": "name" + "originalName": "c_bytea" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean_array", + "arrayDims": 1 } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", + "name": "c_text_array", + "isArray": true, "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_array_types" }, "type": { "name": "text" }, - "originalName": "bio" + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": 
-1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_date_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date_array", + "arrayDims": 1 + } + }, + { + "number": 7, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 } } ], - "filename": "query.sql" + "comments": [ + " Array types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", + "name": "GetPostgresArrayTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "bytea" + }, + "originalName": "c_bytea" + }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": 
true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "id" + "originalName": "c_decimal_array", + "arrayDims": 1 }, { - "name": "name", - "notNull": true, + "name": "c_date_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "name" + "originalName": "c_date_array", + "arrayDims": 1 }, { - "name": "bio", + "name": "c_timestamp_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "bio" + "originalName": "c_timestamp_array", + "arrayDims": 1 } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "name": "InsertPostgresArrayTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_bytea", "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bytea" }, - "arrayDims": 1 + "originalName": "c_bytea" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = 
ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", + "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "name": "GetPostgresArrayTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "name": "bytea" }, - "originalName": "id" + "originalName": "c_bytea" }, { - "name": "name", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_array_types", + "name": "TruncatePostgresArrayTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_point", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "point" }, - "arrayDims": 1 + "originalName": "c_point" } }, { "number": 2, "column": { - "notNull": true, - "isArray": true, + "name": "c_line", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "name": "text" + "name": "line" }, - "arrayDims": 1 + "originalName": "c_line" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": 
":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "name", - "notNull": true, + "name": "c_lseg", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "lseg" }, - "originalName": "name" + "originalName": "c_lseg" } }, { - "number": 2, + "number": 4, "column": { - "name": "author_id", - "notNull": true, + "name": "c_box", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "box" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_box" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": 
{}, - "embedTable": { - "name": "authors" + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } } ], - "filename": "query.sql" + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + 
"schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "name", - "notNull": true, + "name": "c_box", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "box" }, - "originalName": "name" + "originalName": "c_box" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 5, "column": { - "name": "author_name", - "notNull": true, + "name": "c_path", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "path" }, - "originalName": "author_name" + "originalName": "c_path" } }, { - "number": 2, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_polygon", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "polygon" }, - "originalName": "name" + "originalName": "c_polygon" } }, { - "number": 3, + "number": 7, "column": { - "name": "bio_type", + "name": "c_circle", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "extended.bio_type" + "name": "circle" }, - "originalName": "bio_type" + 
"originalName": "c_circle" } } ], "filename": "query.sql", "insert_into_table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" } }, { - "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", - "name": "GetFirstExtendedBioByType", + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", "cmd": ":one", "columns": [ { - "name": "author_name", - "notNull": true, + "name": "c_point", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "point" }, - "originalName": "author_name" + "originalName": "c_point" }, { - "name": "name", - "notNull": true, + "name": "c_line", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "line" }, - "originalName": "name" + "originalName": "c_line" }, { - "name": "bio_type", + "name": "c_lseg", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "extended", - "name": "bio_type" + "name": "lseg" }, - "originalName": "bio_type" - } - ], - "parameters": [ + "originalName": "c_lseg" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": -1, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "extended.bio_type" - }, - "originalName": "bio_type" - } + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": 
"postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index e5e217d6..b3f18e8c 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -1,9 +1,19 @@ ╕ 2 -postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbЁ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbЁ "examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner■ы public"└public▄ +./dist/LocalRunner■ы public"└publicГ + authors) +id0 
        R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -82,17 +92,7 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtext" +c_circle0         Rpostgres_geometric_typesbcircle" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10221,7 +10221,110 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╧ +bio_type Autobiography BiographyMemoirР +9SELECT id, name, bio FROM authors +WHERE name = $1 LIMIT 1 GetAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*0, +name0         R authorsbtextzname: query.sql║ +CSELECT id, name, bio +FROM authors +ORDER BY name +LIMIT $2 +OFFSET $1 ListAuthors:many"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*&" +offset0         8b integer*%! 
+limit0         8b integer: query.sqlн +OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*95 +id0         Rpublicauthorsb  bigserialzid*84 +name0         Rpublicauthorsbtextzname*40 +bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ + args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? 
CEnum); - public async Task GetPostgresTypes() + private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; + public readonly record struct GetAuthorRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorArgs(string Name); + public async Task GetAuthor(GetAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) + using (var command = connection.CreateCommand(GetAuthorSql)) { + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetAuthorRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -163,26 +74,18 @@ public async Task InsertPostgresTypesBatch(List ar throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new GetAuthorRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -191,92 +94,71 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; - public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? 
CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, long Cnt); - public async Task GetPostgresTypesCnt() + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public readonly record struct ListAuthorsRow(long Id, string Name, string? Bio); + public readonly record struct ListAuthorsArgs(int Offset, int Limit); + public async Task> ListAuthors(ListAuthorsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) + using (var command = connection.CreateCommand(ListAuthorsSql)) { + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesCntSql; + command.CommandText = ListAuthorsSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; - public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); - public async Task GetPostgresFunctions() + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public readonly record struct CreateAuthorRow(long Id, string Name, string? Bio); + public readonly record struct CreateAuthorArgs(long Id, string Name, string? Bio); + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) + using (var command = connection.CreateCommand(CreateAuthorSql)) { + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresFunctionsRow + return new CreateAuthorRow { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -290,17 +172,20 @@ public async Task InsertPostgresTypesBatch(List ar throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresFunctionsSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresFunctionsRow + return new CreateAuthorRow { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -309,159 +194,143 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public readonly record struct CreateAuthorReturnIdRow(long Id); + public readonly record struct CreateAuthorReturnIdArgs(string Name, string? Bio); + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) { - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresTypesSql; + command.CommandText = CreateAuthorReturnIdSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); } } - private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; - public readonly record struct InsertPostgresStringTypesArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); - public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public readonly record struct GetAuthorByIdRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorByIdArgs(long Id); + public async Task GetAuthorById(GetAuthorByIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByIdSql)) { - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@id", args.Id); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetAuthorByIdRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresStringTypesSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresStringTypesBatchArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? 
CText); - public async Task InsertPostgresStringTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + command.Parameters.AddWithValue("@id", args.Id); + using (var reader = await command.ExecuteReaderAsync()) { - foreach (var row in args) + if (await reader.ReadAsync()) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); + return new GetAuthorByIdRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; } - - await writer.CompleteAsync(); } - - await connection.CloseAsync(); } + + return null; } - private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; - public readonly record struct GetPostgresStringTypesRow(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); - public async Task GetPostgresStringTypes() + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorByNamePatternArgs(string? 
NamePattern); + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) + using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) { + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesSql; + command.CommandText = GetAuthorByNamePatternSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? 
null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; - public async Task TruncatePostgresStringTypes() + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public readonly record struct DeleteAuthorArgs(string Name); + public async Task DeleteAuthor(DeleteAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresStringTypesSql)) + using (var command = connection.CreateCommand(DeleteAuthorSql)) { + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -473,340 +342,307 @@ public async Task TruncatePostgresStringTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresStringTypesSql; + command.CommandText = DeleteAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; - public readonly record struct GetPostgresStringTypesCntRow(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? 
CText, long Cnt); - public async Task GetPostgresStringTypesCnt() + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) + using (var command = connection.CreateCommand(TruncateAuthorsSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesCntRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4), - Cnt = reader.GetInt64(5) - }; - } - } + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesCntSql; + command.CommandText = TruncateAuthorsSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) + await command.ExecuteNonQueryAsync(); + } + } + + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public readonly record struct UpdateAuthorsArgs(string? 
Bio); + public async Task UpdateAuthors(UpdateAuthorsArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - if (await reader.ReadAsync()) + using (var command = connection.CreateCommand(UpdateAuthorsSql)) { - return new GetPostgresStringTypesCntRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4), - Cnt = reader.GetInt64(5) - }; + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); } } } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = UpdateAuthorsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); + } } - private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; - public readonly record struct GetPostgresStringTypesTextSearchRow(string? 
CText, NpgsqlTsQuery Query, NpgsqlTsVector Tsv, float Rnk); - public readonly record struct GetPostgresStringTypesTextSearchArgs(string ToTsquery); - public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; + public readonly record struct GetAuthorsByIdsRow(long Id, string Name, string? Bio); + public readonly record struct GetAuthorsByIdsArgs(long[] LongArr1); + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) { - command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesTextSearchRow - { - CText = reader.IsDBNull(0) ? null : reader.GetString(0), - Query = reader.GetFieldValue(1), - Tsv = reader.GetFieldValue(2), - Rnk = reader.GetFloat(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesTextSearchSql; + command.CommandText = GetAuthorsByIdsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesTextSearchRow - { - CText = reader.IsDBNull(0) ? null : reader.GetString(0), - Query = reader.GetFieldValue(1), - Tsv = reader.GetFieldValue(2), - Rnk = reader.GetFloat(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; } } - - return null; } - private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; - public readonly record struct InsertPostgresDateTimeTypesArgs(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); - public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; + public readonly record struct GetAuthorsByIdsAndNamesRow(long Id, string Name, string? 
Bio); + public readonly record struct GetAuthorsByIdsAndNamesArgs(long[] LongArr1, string[] StringArr2); + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) { - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresDateTimeTypesSql; + command.CommandText = GetAuthorsByIdsAndNamesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; - public readonly record struct GetPostgresDateTimeTypesRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); - public async Task GetPostgresDateTimeTypes() + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public readonly record struct CreateBookRow(Guid Id); + public readonly record struct CreateBookArgs(string Name, long AuthorId); + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(CreateBookSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesRow - { - CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? 
null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4) - }; - } - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresDateTimeTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesRow - { - CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4) - }; - } - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } - - return null; } - private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; - public async Task TruncatePostgresDateTimeTypes() + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? Book); + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresDateTimeTypesSql; + command.CommandText = ListAllAuthorsBooksSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; - public readonly record struct GetPostgresDateTimeTypesCntRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, long Cnt); - public async Task GetPostgresDateTimeTypesCnt() + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? 
Author2); + public async Task> GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesCntRow - { - CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - Cnt = reader.GetInt64(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresDateTimeTypesCntSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesCntRow - { - CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? 
null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - Cnt = reader.GetInt64(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresDateTimeTypesBatchArgs(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); - public async Task InsertPostgresDateTimeTypesBatch(List args) + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? 
Book); + public readonly record struct GetAuthorsByBookNameArgs(string Name); + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - foreach (var row in args) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorsByBookNameSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; - public readonly record struct InsertPostgresNetworkTypesArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); - public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public readonly record struct CreateExtendedBioArgs(string AuthorName, string Name, ExtendedBioType? 
BioType); + public async Task CreateExtendedBio(CreateExtendedBioArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) + using (var command = connection.CreateCommand(CreateExtendedBioSql)) { - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -818,36 +654,36 @@ public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresNetworkTypesSql; + command.CommandText = CreateExtendedBioSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; - public readonly record struct GetPostgresNetworkTypesRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); - public async Task GetPostgresNetworkTypes() + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public readonly record struct GetFirstExtendedBioByTypeRow(string AuthorName, string Name, ExtendedBioType? BioType); + public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBioType? BioType); + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) + using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) { + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesRow + return new GetFirstExtendedBioByTypeRow { - CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? 
null : reader.GetString(2).ToExtendedBioType() }; } } @@ -861,18 +697,18 @@ public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresNetworkTypesSql; + command.CommandText = GetFirstExtendedBioByTypeSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesRow + return new GetFirstExtendedBioByTypeRow { - CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? 
null : reader.GetString(2).ToExtendedBioType() }; } } @@ -881,14 +717,14 @@ public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args return null; } - private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; - public async Task TruncatePostgresNetworkTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) + using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) { await command.ExecuteNonQueryAsync(); } @@ -901,32 +737,122 @@ public async Task TruncatePostgresNetworkTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresNetworkTypesSql; + command.CommandText = TruncateExtendedBiosSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; - public readonly record struct GetPostgresNetworkTypesCntRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, long Cnt); - public async Task GetPostgresNetworkTypesCnt() + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? 
CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) + using (var command = connection.CreateCommand(InsertPostgresTypesSql)) + { + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? 
CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid); + public async Task InsertPostgresTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); + await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + public readonly record struct GetPostgresTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? 
CEnum); + public async Task GetPostgresTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesCntRow + return new GetPostgresTypesRow { - CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - Cnt = reader.GetInt64(3) + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() }; } } @@ -940,18 +866,26 @@ public async Task TruncatePostgresNetworkTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresNetworkTypesCntSql; + command.CommandText = GetPostgresTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesCntRow + return new GetPostgresTypesRow { - CCidr = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - Cnt = reader.GetInt64(3) + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() }; } } @@ -960,96 +894,92 @@ public async Task TruncatePostgresNetworkTypes() return null; } - private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresNetworkTypesBatchArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); - public async Task InsertPostgresNetworkTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? 
(object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; - public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, long Cnt); + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) { - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? 
args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), + CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresUnstructuredTypesSql; + command.CommandText = GetPostgresTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? 
args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), + CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) + }; + } + } } + + return null; } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); - public async Task GetPostgresUnstructuredTypes() + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow - { - CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + return new GetPostgresFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -1063,25 +993,17 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetPostgresFunctionsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresFunctionsRow { - CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? 
null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -1090,14 +1012,14 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -1110,29 +1032,27 @@ public async Task TruncatePostgresUnstructuredTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresUnstructuredTypesSql; + command.CommandText = TruncatePostgresTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, 
@c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; - public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public readonly record struct InsertPostgresStringTypesArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) { - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1144,91 +1064,34 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresArrayTypesSql; + command.CommandText = InsertPostgresStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public readonly record struct GetPostgresArrayTypesRow(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); - public async Task GetPostgresArrayTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) - { - using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetPostgresArrayTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresArrayTypesRow - { - CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) - }; - } - } - } - - return null; - } - - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea); - public async Task InsertPostgresArrayTypesBatch(List args) + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresStringTypesBatchArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); + public async Task InsertPostgresStringTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) { foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? 
(object)DBNull.Value); } await writer.CompleteAsync(); @@ -1238,24 +1101,27 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public readonly record struct GetPostgresStringTypesRow(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); + public async Task GetPostgresStringTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresArrayTypesCntRow + return new GetPostgresStringTypesRow { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4) }; } } @@ -1269,16 +1135,19 @@ public async Task InsertPostgresArrayTypesBatch(List(0), - Cnt = reader.GetInt64(1) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) }; } } @@ -1287,14 +1156,14 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresStringTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = GetPostgresStringTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresGeoTypesBatchArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + using (var reader = await command.ExecuteReaderAsync()) { - foreach (var row in args) + if (await reader.ReadAsync()) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? 
(object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + return new GetPostgresStringTypesCntRow + { + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4), + Cnt = reader.GetInt64(5) + }; } - - await writer.CompleteAsync(); } - - await connection.CloseAsync(); } + + return null; } - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); - public async Task GetPostgresGeoTypes() + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public readonly record struct GetPostgresStringTypesTextSearchRow(string? 
CText, NpgsqlTsQuery Query, NpgsqlTsVector Tsv, float Rnk); + public readonly record struct GetPostgresStringTypesTextSearchArgs(string ToTsquery); + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) { + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresStringTypesTextSearchRow { - CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CText = reader.IsDBNull(0) ? null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) }; } } @@ -1418,21 +1274,19 @@ public async Task InsertPostgresGeoTypesBatch(List(0), - CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CText = reader.IsDBNull(0) ? 
null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) }; } } @@ -1441,15 +1295,21 @@ public async Task InsertPostgresGeoTypesBatch(List GetAuthor(GetAuthorArgs args) + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public readonly record struct GetPostgresDateTimeTypesRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); + public async Task GetPostgresDateTimeTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow + return new GetPostgresDateTimeTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4) }; } } @@ -1501,18 +1366,19 @@ public async Task TruncatePostgresGeoTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; + command.CommandText = GetPostgresDateTimeTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow + return new GetPostgresDateTimeTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4) }; } } @@ -1521,71 +1387,54 @@ public async Task TruncatePostgresGeoTypes() return null; } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public readonly record struct ListAuthorsRow(long Id, string Name, string? 
Bio); - public readonly record struct ListAuthorsArgs(int Offset, int Limit); - public async Task> ListAuthors(ListAuthorsArgs args) + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(ListAuthorsSql)) + using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) { - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAuthorsSql; + command.CommandText = TruncatePostgresDateTimeTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public readonly record struct CreateAuthorRow(long Id, string Name, string? Bio); - public readonly record struct CreateAuthorArgs(long Id, string Name, string? Bio); - public async Task CreateAuthor(CreateAuthorArgs args) + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public readonly record struct GetPostgresDateTimeTypesCntRow(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval, long Cnt); + public async Task GetPostgresDateTimeTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(CreateAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresDateTimeTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) }; } } @@ -1599,20 +1448,20 @@ public async Task> ListAuthors(ListAuthorsArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorSql; + command.CommandText = GetPostgresDateTimeTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresDateTimeTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CDate = reader.IsDBNull(0) ? null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) }; } } @@ -1621,143 +1470,129 @@ public async Task> ListAuthors(ListAuthorsArgs args) return null; } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public readonly record struct CreateAuthorReturnIdRow(long Id); - public readonly record struct CreateAuthorReturnIdArgs(string Name, string? Bio); - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresDateTimeTypesBatchArgs(DateTime? CDate, TimeSpan? CTime, DateTime? 
CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); + public async Task InsertPostgresDateTimeTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) { - using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) + foreach (var row in args) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); } + + await writer.CompleteAsync(); } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateAuthorReturnIdSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); + await connection.CloseAsync(); } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public readonly record struct GetAuthorByIdRow(long Id, string Name, string? Bio); - public readonly record struct GetAuthorByIdArgs(long Id); - public async Task GetAuthorById(GetAuthorByIdArgs args) + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public readonly record struct InsertPostgresNetworkTypesArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorByIdSql)) + using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@id", args.Id); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorByIdRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = InsertPostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetAuthorByIdRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; - } - } + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public readonly record struct GetAuthorByNamePatternRow(long Id, string Name, string? Bio); - public readonly record struct GetAuthorByNamePatternArgs(string? NamePattern); - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public readonly record struct GetPostgresNetworkTypesRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? 
CMacaddr8); + public async Task GetPostgresNetworkTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) + using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByNamePatternSql; + command.CommandText = GetPostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesRow + { + CCidr = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + }; + } } } + + return null; } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public readonly record struct DeleteAuthorArgs(string Name); - public async Task DeleteAuthor(DeleteAuthorArgs args) + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(DeleteAuthorSql)) + using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -1769,307 +1604,435 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = TruncatePostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public readonly record struct GetPostgresNetworkTypesCntRow(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? 
CMacaddr, long Cnt); + public async Task GetPostgresNetworkTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateAuthorsSql; + command.CommandText = GetPostgresNetworkTypesCntSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNetworkTypesCntRow + { + CCidr = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) + }; + } + } } + + return null; } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; - public readonly record struct UpdateAuthorsArgs(string? 
Bio); - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresNetworkTypesBatchArgs(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr); + public async Task InsertPostgresNetworkTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; + public readonly record struct InsertPostgresUnstructuredTypesArgs(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(UpdateAuthorsSql)) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = UpdateAuthorsSql; + command.CommandText = InsertPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; - public readonly record struct GetAuthorsByIdsRow(long Id, string Name, string? Bio); - public readonly record struct GetAuthorsByIdsArgs(long[] LongArr1); - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; + public readonly record struct GetPostgresUnstructuredTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride); + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? 
null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsSql; + command.CommandText = GetPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + }; + } } } + + return null; } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; - public readonly record struct GetAuthorsByIdsAndNamesRow(long Id, string Name, string? 
Bio); - public readonly record struct GetAuthorsByIdsAndNamesArgs(long[] LongArr1, string[] StringArr2); - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) + using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsAndNamesSql; + command.CommandText = TruncatePostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public readonly record struct CreateBookRow(Guid Id); - public readonly record struct CreateBookArgs(string Name, long AuthorId); - public async Task CreateBook(CreateBookArgs args) + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; + public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(CreateBookSql)) + using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = InsertPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public readonly record struct ListAllAuthorsBooksRow(Author? Author, Book? 
Book); - public async Task> ListAllAuthorsBooks() + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public readonly record struct GetPostgresArrayTypesRow(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAllAuthorsBooksSql; + command.CommandText = GetPostgresArrayTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } } } + + return null; } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public readonly record struct GetDuplicateAuthorsRow(Author? Author, Author? 
Author2); - public async Task> GetDuplicateAuthors() + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea); + public async Task InsertPostgresArrayTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea, COUNT(*) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1"; + public readonly record struct GetPostgresArrayTypesCntRow(byte[]? CBytea, long Cnt); + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetPostgresArrayTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public readonly record struct GetAuthorsByBookNameRow(long Id, string Name, string? Bio, Book? 
Book); - public readonly record struct GetAuthorsByBookNameArgs(string Name); - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = TruncatePostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; - public readonly record struct CreateExtendedBioArgs(string AuthorName, string Name, ExtendedBioType? BioType); - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; + public readonly record struct InsertPostgresGeoTypesArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(CreateExtendedBioSql)) + using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@author_name", args.AuthorName); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -2081,36 +2044,70 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateExtendedBioSql; + command.CommandText = InsertPostgresGeoTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@author_name", args.AuthorName); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public readonly record struct GetFirstExtendedBioByTypeRow(string AuthorName, string Name, ExtendedBioType? BioType); - public readonly record struct GetFirstExtendedBioByTypeArgs(ExtendedBioType? 
BioType); - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresGeoTypesBatchArgs(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); + public async Task InsertPostgresGeoTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public readonly record struct GetPostgresGeoTypesRow(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? 
CCircle); + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetPostgresGeoTypesRow { - AuthorName = reader.GetString(0), - Name = reader.GetString(1), - BioType = reader.IsDBNull(2) ? null : reader.GetString(2).ToExtendedBioType() + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) }; } } @@ -2124,18 +2121,21 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetFirstExtendedBioByTypeSql; + command.CommandText = GetPostgresGeoTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetPostgresGeoTypesRow { - AuthorName = reader.GetString(0), - Name = reader.GetString(1), - BioType = reader.IsDBNull(2) ? 
null : reader.GetString(2).ToExtendedBioType() + CPoint = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) }; } } @@ -2144,14 +2144,14 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) return null; } - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -2164,7 +2164,7 @@ public async Task TruncateExtendedBios() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateExtendedBiosSql; + command.CommandText = TruncatePostgresGeoTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index d9a4334f..58dec347 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/types/schema.sql", - "examples/config/postgresql/authors/schema.sql" + "examples/config/postgresql/authors/schema.sql", + 
"examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/types/query.sql", - "examples/config/postgresql/authors/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlExample", @@ -25,6 +25,96 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "postgres_types" @@ -565,96 +655,6 @@ } } ] - }, - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - 
"notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] } ], "enums": [ @@ -32575,1303 +32575,1521 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 4, + "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int4" + "name": "text" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_bigint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "offset", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" + "name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_numeric", + "name": "limit", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" + "name": "integer" + } } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 9, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "id", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "bigserial" }, - "originalName": "c_double_precision" + "originalName": "id" } }, { - "number": 10, + "number": 2, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } }, { - "number": 11, + "number": 3, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" - 
} - }, - { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "originalName": "bio" } } ], - "comments": [ - " Basic types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "authors" } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.bool" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } }, { "number": 2, "column": { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int2" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "bio" } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ { - "number": 4, - "column": { - "name": 
"c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_numeric", + "name": "id", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.numeric" + "name": "bigserial" }, - "originalName": "c_numeric" + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + 
"name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 9, + "number": 1, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 10, + "number": 1, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" + "originalName": "bio" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { - "name": "c_boolean", + "name": "id", 
+ "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bool" + "name": "bigserial" }, - "originalName": "c_boolean" + "originalName": "id" }, { - "name": "c_bit", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bit" + "name": "text" }, - "originalName": "c_bit" + "originalName": "name" }, { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int2" + "name": "text" }, - "originalName": "c_smallint" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_integer", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int4" - }, - "originalName": "c_integer" - }, + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "name": "c_bigint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bigserial" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_numeric", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - 
"schema": "pg_catalog", - "name": "numeric" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "float8" - }, - "originalName": "c_double_precision" - }, + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "name": "c_money", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "books" }, "type": { - "name": "money" + "name": "uuid" }, - "originalName": "c_money" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_uuid", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = 
books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_enum", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" + "type": {}, + "embedTable": { + "name": "books" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "c_smallint", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int2" - }, - "originalName": "c_smallint" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE 
books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int4" + "name": "bigserial" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_bigint", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "text" }, - "originalName": "c_bigint" + "originalName": "name" }, { - "name": "c_real", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "bio" }, { - "name": "c_numeric", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_numeric" + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "author_name" + } }, { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + 
"notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { "schema": "pg_catalog", - "name": "float8" + "name": "varchar" }, - "originalName": "c_double_precision" + "originalName": "author_name" }, { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "money" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_money" + "originalName": "name" }, { - "name": "c_uuid", + "name": "bio_type", "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "uuid" + "schema": "extended", + "name": "bio_type" }, - "originalName": "c_uuid" - }, + "originalName": "bio_type" + } + ], + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS 
max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": 
"c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_real", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_decimal", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" + } + }, + { + "number": 11, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 12, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": 
"query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.bool" }, - "originalName": "c_char" + "originalName": "c_boolean" } }, { "number": 2, "column": { - "name": "c_varchar", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int2" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 3, "column": { - "name": "c_character_varying", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int4" }, - "originalName": "c_character_varying" + "originalName": "c_integer" } }, { "number": 4, "column": { - "name": "c_bpchar", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "pg_catalog.int8" }, - "originalName": "c_bpchar" + "originalName": "c_bigint" } }, { "number": 5, "column": { - "name": 
"c_text", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "c_text" + "originalName": "c_real" } - } - ], - "comments": [ - " String types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_string_types" - } - }, - { - "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "c_char", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_char" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 7, "column": { - "name": "c_varchar", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_varchar" + "originalName": "c_decimal" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_character_varying", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.float8" }, - "originalName": "c_character_varying" + "originalName": "c_double_precision" } }, { - "number": 4, + "number": 9, "column": { - "name": "c_bpchar", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "money" }, - "originalName": "c_bpchar" + "originalName": "c_money" } }, { - 
"number": 5, + "number": 10, "column": { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" }, - "originalName": "c_text" + "originalName": "c_uuid" } } ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_string_types" + "name": "postgres_types" } }, { - "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", - "name": "GetPostgresStringTypes", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", "cmd": ":one", "columns": [ { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "bool" }, - "originalName": "c_char" + "originalName": "c_boolean" }, { - "name": "c_varchar", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "bit" }, - "originalName": "c_varchar" + "originalName": "c_bit" }, { - "name": "c_character_varying", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "int2" }, - "originalName": "c_character_varying" + "originalName": "c_smallint" }, { - "name": "c_bpchar", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "int4" }, - "originalName": "c_bpchar" + "originalName": "c_integer" }, { - "name": "c_text", - "length": -1, - "table": { - "name": 
"postgres_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_string_types", - "name": "TruncatePostgresStringTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", - "name": "GetPostgresStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_char", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "int8" }, - "originalName": "c_char" + "originalName": "c_bigint" }, { - "name": "c_varchar", + "name": "c_real", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "float4" }, - "originalName": "c_varchar" + "originalName": "c_real" }, { - "name": "c_character_varying", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "numeric" }, - "originalName": "c_character_varying" + "originalName": "c_numeric" }, { - "name": "c_bpchar", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_bpchar" + "originalName": "c_decimal" }, { - "name": "c_text", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "float8" }, - "originalName": "c_text" + "originalName": "c_double_precision" }, { - "name": "cnt", - "notNull": 
true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, - { - "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", - "name": "GetPostgresStringTypesTextSearch", - "cmd": ":one", - "columns": [ - { - "name": "c_text", + "name": "c_money", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "money" }, - "originalName": "c_text" + "originalName": "c_money" }, { - "name": "query", - "notNull": true, + "name": "c_uuid", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsquery" + "name": "uuid" }, - "originalName": "query" + "originalName": "c_uuid" }, { - "name": "tsv", - "notNull": true, + "name": "c_enum", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsvector" + "name": "c_enum" }, - "originalName": "tsv" - }, - { - "name": "rnk", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "real" - } - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "to_tsquery", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } - } + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } 
- }, - { - "number": 2, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 3, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - "originalName": "c_timestamp" - } - }, - { - "number": 4, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 5, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - } - ], - "comments": [ - " DateTime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_datetime_types" - } - }, - { - "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", - "name": "GetPostgresDateTimeTypes", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_date", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "int2" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_time", + "name": "c_boolean", "length": 
-1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "bool" }, - "originalName": "c_time" + "originalName": "c_boolean" }, { - "name": "c_timestamp", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "int4" }, - "originalName": "c_timestamp" + "originalName": "c_integer" }, { - "name": "c_timestamp_with_tz", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "int8" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bigint" }, { - "name": "c_interval", + "name": "c_real", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "float4" }, - "originalName": "c_interval" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_datetime_types", - "name": "TruncatePostgresDateTimeTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", - "name": "GetPostgresDateTimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_real" + }, { - "name": "c_date", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_time", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": 
"pg_catalog", - "name": "time" + "name": "numeric" }, - "originalName": "c_time" + "originalName": "c_decimal" }, { - "name": "c_timestamp", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "float8" }, - "originalName": "c_timestamp" + "originalName": "c_double_precision" }, { - "name": "c_timestamp_with_tz", + "name": "c_money", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "timestamptz" + "name": "money" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_money" }, { - "name": "c_interval", + "name": "c_uuid", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "interval" + "name": "uuid" }, - "originalName": "c_interval" + "originalName": "c_uuid" }, { "name": "cnt", @@ -33879,262 +34097,362 @@ "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, 
$3, $4, $5)", - "name": "InsertPostgresDateTimeTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_date", + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "date" + "name": "pg_catalog.bpchar" }, - "originalName": "c_date" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_time", + "name": "c_varchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.time" + "name": "pg_catalog.varchar" }, - "originalName": "c_time" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_timestamp", + "name": "c_character_varying", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.varchar" }, - "originalName": "c_timestamp" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_timestamp_with_tz", + "name": "c_bpchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamptz" + "name": "bpchar" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bpchar" } }, { "number": 5, "column": { - "name": "c_interval", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.interval" + 
"name": "text" }, - "originalName": "c_interval" + "originalName": "c_text" } } ], + "comments": [ + " String types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_datetime_types" + "name": "postgres_string_types" } }, { - "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", - "name": "InsertPostgresNetworkTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_cidr", + "name": "c_char", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "pg_catalog.bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_inet", + "name": "c_varchar", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "pg_catalog.varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "name": "pg_catalog.varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_macaddr8", + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": 
"c_text", "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, "type": { - "name": "macaddr8" - } + "name": "text" + }, + "originalName": "c_text" } } ], - "comments": [ - " Network types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_network_types" + "name": "postgres_string_types" } }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", - "name": "GetPostgresNetworkTypes", + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" }, { - "name": "c_macaddr8", - "notNull": true, + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", "length": -1, + "table": { + "name": "postgres_string_types" + }, "type": { "name": "text" - } + }, + "originalName": "c_text" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_network_types", - "name": 
"TruncatePostgresNetworkTypes", + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", - "name": "GetPostgresNetworkTypesCnt", + "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" }, { "name": "cnt", @@ -34149,488 +34467,295 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, 
$3)", - "name": "InsertPostgresNetworkTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 2, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 3, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_network_types" - } - }, - { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 2, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 3, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 4, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 5, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 6, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - } - ], - "comments": [ - " Unstructured types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": 
"postgres_unstructured_types" - } - }, - { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", "cmd": ":one", "columns": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", + "name": "c_text", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "json" + "name": "text" }, - "originalName": "c_json_string_override" + "originalName": "c_text" }, { - "name": "c_jsonb", + "name": "query", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonb" + "name": "tsquery" }, - "originalName": "c_jsonb" + "originalName": "query" }, { - "name": "c_jsonpath", + "name": "tsv", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonpath" + "name": "tsvector" }, - "originalName": "c_jsonpath" + "originalName": "tsv" }, { - "name": "c_xml", + "name": "rnk", + "notNull": true, "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, + "isFuncCall": true, "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, + "name": "real" + } + } + ], + "parameters": [ { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - 
"name": "xml" - }, - "originalName": "c_xml_string_override" + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresArrayTypes", + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.bool" + "name": "pg_catalog.time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" } }, { "number": 4, - "column": { - "name": "c_integer_array", - "isArray": true, - "length": -1, - "table": { - "schema": 
"public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer_array", - "arrayDims": 1 - } - }, - { - "number": 5, - "column": { - "name": "c_decimal_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal_array", - "arrayDims": 1 - } - }, - { - "number": 6, - "column": { - "name": "c_date_array", - "isArray": true, + "column": { + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "date" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" } }, { - "number": 7, + "number": 5, "column": { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.interval" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 + "originalName": "c_interval" } } ], "comments": [ - " Array types " + " DateTime types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" } }, { - "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", - "name": "GetPostgresArrayTypes", + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", "cmd": ":one", "columns": [ { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - 
"originalName": "c_bytea" + "originalName": "c_date" }, { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "bool" + "name": "time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" }, { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" }, { - "name": "c_integer_array", - "isArray": true, + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "int4" + "name": "timestamptz" }, - "originalName": "c_integer_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" }, { - "name": "c_decimal_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "interval" }, - "originalName": "c_decimal_array", - "arrayDims": 1 - }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_date_array", - "isArray": true, + "name": "c_date", "length": -1, "table": { - 
"name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "name": "date" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_date" }, { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", "name": "timestamp" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", - "name": "InsertPostgresArrayTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + "originalName": "c_timestamp" + }, { - "number": 1, - "column": { - "name": "c_bytea", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "bytea" - }, - "originalName": "c_bytea" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_array_types" - } - }, - { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", - "name": "GetPostgresArrayTypesCnt", - "cmd": ":one", - "columns": [ + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, { - "name": "c_bytea", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "schema": "pg_catalog", + "name": "interval" }, - "originalName": "c_bytea" + "originalName": "c_interval" }, { "name": "cnt", @@ -34645,1218 +34770,1093 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_array_types", - 
"name": "TruncatePostgresArrayTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "point" + "name": "date" }, - "originalName": "c_point" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "line" + "name": "pg_catalog.time" }, - "originalName": "c_line" + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "lseg" + "name": "pg_catalog.timestamp" }, - "originalName": "c_lseg" + "originalName": "c_timestamp" } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "box" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_box" + "originalName": "c_timestamp_with_tz" } }, { "number": 5, "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - 
"originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "circle" + "name": "pg_catalog.interval" }, - "originalName": "c_circle" + "originalName": "c_interval" } } ], - "comments": [ - " Geometric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" } }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_cidr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "point" + "name": "cidr" }, - "originalName": "c_point" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_inet", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "line" + "name": "inet" }, - "originalName": "c_line" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_macaddr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - 
"name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "lseg" + "name": "macaddr" }, - "originalName": "c_lseg" + "originalName": "c_macaddr" } }, { "number": 4, "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } - }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_macaddr8", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "macaddr8" + } } } ], + "comments": [ + " Network types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_network_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - }, - { - "name": "c_lseg", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - }, - { - "name": "c_box", - "length": -1, - "table": { - "name": 
"postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - }, - { - "name": "c_path", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - }, - { - "name": "c_polygon", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - }, - { - "name": "c_circle", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": 
-1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } + "originalName": "c_macaddr" }, { - "number": 2, - "column": { - "name": "limit", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" 
+ "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_network_types" } }, { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], + "text": "\nINSERT INTO postgres_unstructured_types\n(\n 
c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_json", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "json" + } } }, { "number": 2, "column": { - "name": "bio", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "json" + } + } + }, + { + "number": 3, + "column": { + "name": "c_jsonb", + "length": -1, + "type": { + "name": "jsonb" + } + } + }, + { + "number": 4, + "column": { + "name": "c_jsonpath", + "length": -1, + "type": { + "name": "jsonpath" + } + } + }, + { + "number": 5, + "column": { + "name": "c_xml", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 6, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": 
"c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", + "name": "c_jsonb", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "jsonb" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_jsonb" + }, { - "name": "id", - "notNull": true, + "name": "c_jsonpath", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "jsonpath" }, - "originalName": "id" + "originalName": "c_jsonpath" }, { - "name": "name", - "notNull": true, + "name": "c_xml", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "name" + "originalName": "c_xml" }, { - "name": "bio", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "bio" + "originalName": "c_xml_string_override" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n 
c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresArrayTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "c_bytea", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "bytea" }, - "originalName": "name" + "originalName": "c_bytea" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean_array", + "arrayDims": 1 } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", + "name": "c_text_array", + "isArray": true, "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_array_types" }, "type": { "name": "text" }, - "originalName": "bio" + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": 
"public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_date_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "date" + }, + "originalName": "c_date_array", + "arrayDims": 1 + } + }, + { + "number": 7, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 } } ], - "filename": "query.sql" + "comments": [ + " Array types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", + "name": "GetPostgresArrayTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "bytea" + }, + "originalName": "c_bytea" + }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": 
"pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "id" + "originalName": "c_decimal_array", + "arrayDims": 1 }, { - "name": "name", - "notNull": true, + "name": "c_date_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "name" + "originalName": "c_date_array", + "arrayDims": 1 }, { - "name": "bio", + "name": "c_timestamp_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "bio" + "originalName": "c_timestamp_array", + "arrayDims": 1 } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "name": "InsertPostgresArrayTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_bytea", "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bytea" }, - "arrayDims": 1 + "originalName": "c_bytea" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", + "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "name": "GetPostgresArrayTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", "length": 
-1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "name": "bytea" }, - "originalName": "id" + "originalName": "c_bytea" }, { - "name": "name", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_array_types", + "name": "TruncatePostgresArrayTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_point", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "point" }, - "arrayDims": 1 + "originalName": "c_point" } }, { "number": 2, "column": { - "notNull": true, - "isArray": true, + "name": "c_line", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "name": "text" + "name": "line" }, - "arrayDims": 1 + "originalName": "c_line" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "name", - "notNull": true, + "name": "c_lseg", "length": -1, 
"table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "lseg" }, - "originalName": "name" + "originalName": "c_lseg" } }, { - "number": 2, + "number": 4, "column": { - "name": "author_id", - "notNull": true, + "name": "c_box", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "box" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_box" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" } }, { - "name": "authors", - "length": -1, - "type": {}, - 
"embedTable": { - "name": "authors" + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } } ], - "filename": "query.sql" + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_geometric_types" + } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + 
"name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "name", - "notNull": true, + "name": "c_box", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "box" }, - "originalName": "name" + "originalName": "c_box" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 5, "column": { - "name": "author_name", - "notNull": true, + "name": "c_path", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "path" }, - "originalName": "author_name" + "originalName": "c_path" } }, { - "number": 2, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_polygon", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "polygon" }, - "originalName": "name" + "originalName": "c_polygon" } }, { - "number": 3, + "number": 7, "column": { - "name": "bio_type", + "name": "c_circle", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "extended.bio_type" + "name": "circle" }, - "originalName": "bio_type" + "originalName": "c_circle" } } ], "filename": "query.sql", "insert_into_table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" } }, { - "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", - "name": "GetFirstExtendedBioByType", + "text": "SELECT c_point, 
c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", "cmd": ":one", "columns": [ { - "name": "author_name", - "notNull": true, + "name": "c_point", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "point" }, - "originalName": "author_name" + "originalName": "c_point" }, { - "name": "name", - "notNull": true, + "name": "c_line", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "line" }, - "originalName": "name" + "originalName": "c_line" }, { - "name": "bio_type", + "name": "c_lseg", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "extended", - "name": "bio_type" + "name": "lseg" }, - "originalName": "bio_type" - } - ], - "parameters": [ + "originalName": "c_lseg" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": -1, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "extended.bio_type" - }, - "originalName": "bio_type" - } + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE 
extended.bios", - "name": "TruncateExtendedBios", + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index f8bd06f2..6c5abf85 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -1,9 +1,19 @@ Щ 2 -postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlb╤ +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunner■ы public"└public▄ +./dist/LocalRunner■ы public"└publicГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -82,17 +92,7 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         
Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         Rbooksb -pg_catalogint8) - description0         Rbooksbtext" +c_circle0         Rpostgres_geometric_typesbcircle" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10221,7 +10221,110 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╧ +bio_type Autobiography BiographyMemoirР +9SELECT id, name, bio FROM authors +WHERE name = $1 LIMIT 1 GetAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*0, +name0         R authorsbtextzname: query.sql║ +CSELECT id, name, bio +FROM authors +ORDER BY name +LIMIT $2 +OFFSET $1 ListAuthors:many"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*&" +offset0         8b integer*%! +limit0         8b integer: query.sqlн +OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*95 +id0         Rpublicauthorsb  bigserialzid*84 +name0         Rpublicauthorsbtextzname*40 +bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ + GetAuthor(GetAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresTypesSql)) + using (var command = connection.CreateCommand(GetAuthorSql)) { - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresTypesSql; + command.CommandText = GetAuthorSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetAuthorRow + { + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + }; + } + } } + + return null; } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs + private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; + public class ListAuthorsRow { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? 
CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task InsertPostgresTypesBatch(List args) + public class ListAuthorsArgs { - using (var connection = new NpgsqlConnection(ConnectionString)) + public int Offset { get; set; } + public int Limit { get; set; } + }; + public async Task> ListAuthors(ListAuthorsArgs args) + { + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(ListAuthorsSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } } - - await writer.CompleteAsync(); } + } - await connection.CloseAsync(); + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAuthorsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@offset", args.Offset); + command.Parameters.AddWithValue("@limit", args.Limit); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow + private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; + public class CreateAuthorRow { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } - public CEnum? 
CEnum { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresTypes() + public class CreateAuthorArgs + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthor(CreateAuthorArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) + using (var command = connection.CreateCommand(CreateAuthorSql)) { + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new CreateAuthorRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -204,26 +200,20 @@ public async Task GetPostgresTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesSql; + command.CommandText = CreateAuthorSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesRow + return new CreateAuthorRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -232,46 +222,74 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow + private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; + public class CreateAuthorReturnIdRow { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } - public long Cnt { get; set; } + public long Id { get; set; } }; - public async Task GetPostgresTypesCnt() + public class CreateAuthorReturnIdArgs + { + public string Name { get; set; } + public string Bio { get; set; } + }; + public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) + using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) { + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateAuthorReturnIdSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt64(result); + } + } + + private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; + public class GetAuthorByIdRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorByIdArgs + { + public long Id { get; set; } + }; + public async Task GetAuthorById(GetAuthorByIdArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetAuthorByIdSql)) + { + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesCntRow + return new GetAuthorByIdRow { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? 
)null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }; } } @@ -285,25 +303,18 @@ public async Task GetPostgresTypesCnt() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresTypesCntSql; + command.CommandText = GetAuthorByIdSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresTypesCntRow + return new GetAuthorByIdRow { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + Id = reader.GetInt64(0), + Name = reader.GetString(1), + Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }; } } @@ -312,70 +323,94 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; - public class GetPostgresFunctionsRow + private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; + public class GetAuthorByNamePatternRow { - public int? MaxInteger { get; set; } - public string MaxVarchar { get; set; } - public DateTime MaxTimestamp { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } }; - public async Task GetPostgresFunctions() + public class GetAuthorByNamePatternArgs + { + public string NamePattern { get; set; } + }; + public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) + using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) { + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresFunctionsSql; + command.CommandText = GetAuthorByNamePatternSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(DeleteAuthorSql)) { - return new GetPostgresFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), - MaxTimestamp = reader.GetDateTime(2) - }; + command.Parameters.AddWithValue("@name", args.Name); + await command.ExecuteNonQueryAsync(); } } + + return; } - return null; + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAuthorSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + await command.ExecuteNonQueryAsync(); + } } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() + private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; + public async Task TruncateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) + using (var command = connection.CreateCommand(TruncateAuthorsSql)) { await command.ExecuteNonQueryAsync(); } @@ -388,329 +423,329 @@ public async Task TruncatePostgresTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresTypesSql; + command.CommandText = TruncateAuthorsSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; - public class InsertPostgresStringTypesArgs + private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; + public class 
UpdateAuthorsArgs + { + public string Bio { get; set; } + }; + public async Task UpdateAuthors(UpdateAuthorsArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(UpdateAuthorsSql)) + { + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = UpdateAuthorsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); + return await command.ExecuteNonQueryAsync(); + } + } + + private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; + public class GetAuthorsByIdsRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsArgs + { + public long[] LongArr1 { get; set; } + }; + public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) + { + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetAuthorsByIdsSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + + private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; + public class GetAuthorsByIdsAndNamesRow + { + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + }; + public class GetAuthorsByIdsAndNamesArgs { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } + public long[] LongArr1 { get; set; } + public string[] StringArr2 { get; set; } }; - public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) + public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) + using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) { - command.Parameters.AddWithValue("@c_char", args.CChar ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresStringTypesSql; + command.CommandText = GetAuthorsByIdsAndNamesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresStringTypesBatchArgs - { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - }; - public async Task InsertPostgresStringTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + command.Parameters.AddWithValue("@longArr_1", args.LongArr1); + command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); + using (var reader = await command.ExecuteReaderAsync()) { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); - await writer.WriteAsync(row.CText ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; } - - await connection.CloseAsync(); } } - private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; - public class GetPostgresStringTypesRow + private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } + public Guid Id { get; set; } }; - public async Task GetPostgresStringTypes() + public class CreateBookArgs + { + public string Name { get; set; } + public long AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) + using (var command = connection.CreateCommand(CreateBookSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) - }; - } - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesSql; + command.CommandText = CreateBookSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) - }; - } - } + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Guid.Parse(result?.ToString()); } - - return null; } - private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; - public async Task TruncatePostgresStringTypes() + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow + { + public Author Author { get; set; } + public Book Book { get; set; } + }; + public async Task> ListAllAuthorsBooks() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresStringTypesSql)) + using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } } } - - return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresStringTypesSql; + command.CommandText = ListAllAuthorsBooksSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } } } - private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; - public class GetPostgresStringTypesCntRow + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow { - public string CChar { get; set; } - public string CVarchar { get; set; } - public string CCharacterVarying { get; set; } - public string CBpchar { get; set; } - public string CText { get; set; } - public long Cnt { get; set; } + public Author Author { get; set; } + public Author Author2 { get; set; } }; - public async Task GetPostgresStringTypesCnt() + public async Task> 
GetDuplicateAuthors() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) + using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) { using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesCntRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4), - Cnt = reader.GetInt64(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesCntSql; + command.CommandText = GetDuplicateAuthorsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesCntRow - { - CChar = reader.IsDBNull(0) ? null : reader.GetString(0), - CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), - CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), - CBpchar = reader.IsDBNull(3) ? 
null : reader.GetString(3), - CText = reader.IsDBNull(4) ? null : reader.GetString(4), - Cnt = reader.GetInt64(5) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; } } - - return null; } - private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; - public class GetPostgresStringTypesTextSearchRow + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow { - public string CText { get; set; } - public NpgsqlTsQuery Query { get; set; } - public NpgsqlTsVector Tsv { get; set; } - public float Rnk { get; set; } + public long Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } }; - public class GetPostgresStringTypesTextSearchArgs + public class GetAuthorsByBookNameArgs { - public string ToTsquery { get; set; } + public string Name { get; set; } }; - public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using 
(var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) + using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) { - command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesTextSearchRow - { - CText = reader.IsDBNull(0) ? null : reader.GetString(0), - Query = reader.GetFieldValue(1), - Tsv = reader.GetFieldValue(2), - Rnk = reader.GetFloat(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; } } } - - return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresStringTypesTextSearchSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - if (await reader.ReadAsync()) - { - return new GetPostgresStringTypesTextSearchRow - { - CText = reader.IsDBNull(0) ? 
null : reader.GetString(0), - Query = reader.GetFieldValue(1), - Tsv = reader.GetFieldValue(2), - Rnk = reader.GetFloat(3) - }; - } + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; } } - - return null; } - private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; - public class InsertPostgresDateTimeTypesArgs + private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; + public class CreateExtendedBioArgs { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } }; - public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) + public async Task CreateExtendedBio(CreateExtendedBioArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(CreateExtendedBioSql)) { - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -722,45 +757,44 @@ public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs ar throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresDateTimeTypesSql; + command.CommandText = CreateExtendedBioSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@author_name", args.AuthorName); + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? 
args.BioType.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; - public class GetPostgresDateTimeTypesRow + private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; + public class GetFirstExtendedBioByTypeRow { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } + public string AuthorName { get; set; } + public string Name { get; set; } + public ExtendedBioType? BioType { get; set; } }; - public async Task GetPostgresDateTimeTypes() + public class GetFirstExtendedBioByTypeArgs + { + public ExtendedBioType? BioType { get; set; } + }; + public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) { + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresDateTimeTypesRow + return new GetFirstExtendedBioByTypeRow { - CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? (TimeSpan? 
)null : reader.GetFieldValue(4) + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? (ExtendedBioType? )null : reader.GetString(2).ToExtendedBioType() }; } } @@ -774,19 +808,18 @@ public async Task GetPostgresDateTimeTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresDateTimeTypesSql; + command.CommandText = GetFirstExtendedBioByTypeSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresDateTimeTypesRow + return new GetFirstExtendedBioByTypeRow { - CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4) + AuthorName = reader.GetString(0), + Name = reader.GetString(1), + BioType = reader.IsDBNull(2) ? (ExtendedBioType? 
)null : reader.GetString(2).ToExtendedBioType() }; } } @@ -795,14 +828,14 @@ public async Task GetPostgresDateTimeTypes() return null; } - private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; - public async Task TruncatePostgresDateTimeTypes() + private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; + public async Task TruncateExtendedBios() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) + using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) { await command.ExecuteNonQueryAsync(); } @@ -815,177 +848,162 @@ public async Task TruncatePostgresDateTimeTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresDateTimeTypesSql; + command.CommandText = TruncateExtendedBiosSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; - public class GetPostgresDateTimeTypesCntRow + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + public class InsertPostgresTypesArgs { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? 
CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } - public long Cnt { get; set; } + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } }; - public async Task GetPostgresDateTimeTypesCnt() + public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) + using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesCntRow - { - CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), - Cnt = reader.GetInt64(5) - }; - } - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - return null; + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresDateTimeTypesCntSql; + command.CommandText = InsertPostgresTypesSql; command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresDateTimeTypesCntRow - { - CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), - CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), - CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), - CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), - CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), - Cnt = reader.GetInt64(5) - }; - } - } + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - return null; } - private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresDateTimeTypesBatchArgs + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresTypesBatchArgs { - public DateTime? CDate { get; set; } - public TimeSpan? CTime { get; set; } - public DateTime? CTimestamp { get; set; } - public DateTime? CTimestampWithTz { get; set; } - public TimeSpan? CInterval { get; set; } + public bool? CBoolean { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? 
CUuid { get; set; } }; - public async Task InsertPostgresDateTimeTypesBatch(List args) + public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) { await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) { foreach (var row in args) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); - await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); - await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); - await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInterval ?? (object)DBNull.Value, NpgsqlDbType.Interval); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; - public class InsertPostgresNetworkTypesArgs - { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } - }; - public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) - { - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); + await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); } - } - return; - } + await writer.CompleteAsync(); + } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertPostgresNetworkTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); + await connection.CloseAsync(); } } - private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; - public class GetPostgresNetworkTypesRow + private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + public class GetPostgresTypesRow { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public string CMacaddr8 { get; set; } + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } }; - public async Task GetPostgresNetworkTypes() + public async Task GetPostgresTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) + using (var command = connection.CreateCommand(GetPostgresTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesRow + return new GetPostgresTypesRow { - CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + CBoolean = reader.IsDBNull(0) ? (bool? 
)null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() }; } } @@ -999,18 +1017,26 @@ public async Task GetPostgresNetworkTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresNetworkTypesSql; + command.CommandText = GetPostgresTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesRow + return new GetPostgresTypesRow { - CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CReal = reader.IsDBNull(5) ? 
(float? )null : reader.GetFloat(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), + CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() }; } } @@ -1019,58 +1045,110 @@ public async Task GetPostgresNetworkTypes() return null; } - private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; - public async Task TruncatePostgresNetworkTypes() + private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + public class GetPostgresTypesCntRow + { + public short? CSmallint { get; set; } + public bool? CBoolean { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public float? CReal { get; set; } + public decimal? CNumeric { get; set; } + public decimal? CDecimal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public Guid? 
CUuid { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) + using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), + CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresNetworkTypesSql; + command.CommandText = GetPostgresTypesCntSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresTypesCntRow + { + CSmallint = reader.IsDBNull(0) ? (short? 
)null : reader.GetInt16(0), + CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), + CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), + CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), + CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), + CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), + CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), + CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), + Cnt = reader.GetInt64(10) + }; + } + } } + + return null; } - private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; - public class GetPostgresNetworkTypesCntRow + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + public class GetPostgresFunctionsRow { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - public long Cnt { get; set; } + public int? 
MaxInteger { get; set; } + public string MaxVarchar { get; set; } + public DateTime MaxTimestamp { get; set; } }; - public async Task GetPostgresNetworkTypesCnt() + public async Task GetPostgresFunctions() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) + using (var command = connection.CreateCommand(GetPostgresFunctionsSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesCntRow + return new GetPostgresFunctionsRow { - CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - Cnt = reader.GetInt64(3) + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -1084,18 +1162,17 @@ public async Task GetPostgresNetworkTypesCnt() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresNetworkTypesCntSql; + command.CommandText = GetPostgresFunctionsSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresNetworkTypesCntRow + return new GetPostgresFunctionsRow { - CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), - CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - Cnt = reader.GetInt64(3) + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxVarchar = reader.IsDBNull(1) ? 
null : reader.GetString(1), + MaxTimestamp = reader.GetDateTime(2) }; } } @@ -1104,59 +1181,54 @@ public async Task GetPostgresNetworkTypesCnt() return null; } - private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresNetworkTypesBatchArgs - { - public NpgsqlCidr? CCidr { get; set; } - public IPAddress CInet { get; set; } - public PhysicalAddress CMacaddr { get; set; } - }; - public async Task InsertPostgresNetworkTypesBatch(List args) + private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; + public async Task TruncatePostgresTypes() { - using (var connection = new NpgsqlConnection(ConnectionString)) + if (this.Transaction == null) { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) + using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - foreach (var row in args) + using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInet ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMacaddr ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } - - await writer.CompleteAsync(); } - await connection.CloseAsync(); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; - public class InsertPostgresUnstructuredTypesArgs + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; + public class InsertPostgresStringTypesArgs { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? 
CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } }; - public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) + public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(InsertPostgresStringTypesSql)) { - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1168,53 +1240,78 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresUnstructuredTypesSql; + command.CommandText = InsertPostgresStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); - command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_char", args.CChar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_varchar", args.CVarchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_character_varying", args.CCharacterVarying ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bpchar", args.CBpchar ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public class GetPostgresUnstructuredTypesRow + private const string InsertPostgresStringTypesBatchSql = "COPY postgres_string_types (c_char, c_varchar, c_character_varying, c_bpchar, c_text) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresStringTypesBatchArgs { - public JsonElement? CJson { get; set; } - public string CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public string CXmlStringOverride { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } }; - public async Task GetPostgresUnstructuredTypes() + public async Task InsertPostgresStringTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresStringTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CChar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CVarchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCharacterVarying ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBpchar ?? (object)DBNull.Value); + await writer.WriteAsync(row.CText ?? 
(object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresStringTypesSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1"; + public class GetPostgresStringTypesRow + { + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + }; + public async Task GetPostgresStringTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresStringTypesRow { - CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) }; } } @@ -1228,25 +1325,19 @@ public async Task GetPostgresUnstructuredTypes( throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetPostgresStringTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresStringTypesRow { - CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), - CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), - CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), - CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), - CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => - { - var xmlDoc = new XmlDocument(); - xmlDoc.LoadXml(r.GetString(o)); - return xmlDoc; - }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4) }; } } @@ -1255,58 +1346,15 @@ public async Task GetPostgresUnstructuredTypes( return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncatePostgresUnstructuredTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; - public class InsertPostgresArrayTypesArgs - { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } - }; - public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) + private const string TruncatePostgresStringTypesSql = "TRUNCATE TABLE postgres_string_types"; + public async Task TruncatePostgresStringTypes() { if (this.Transaction == null) { using (var connection = 
NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresStringTypesSql)) { - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1318,51 +1366,42 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresArrayTypesSql; + command.CommandText = TruncatePostgresStringTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; - public class GetPostgresArrayTypesRow + private const string GetPostgresStringTypesCntSql = "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text, COUNT(*) AS cnt FROM postgres_string_types GROUP BY c_char, c_varchar, c_character_varying, c_bpchar, c_text LIMIT 1"; + public class GetPostgresStringTypesCntRow { - public byte[] CBytea { get; set; } - public bool[] CBooleanArray { get; set; } - public string[] CTextArray { get; set; } - public int[] CIntegerArray { get; set; } - public decimal[] CDecimalArray { get; set; } - public DateTime[] CDateArray { get; set; } - public DateTime[] CTimestampArray { get; set; } + public string CChar { get; set; } + public string CVarchar { get; set; } + public string CCharacterVarying { get; set; } + public string CBpchar { get; set; } + public string CText { get; set; } + public long Cnt { get; set; } }; - public async Task GetPostgresArrayTypes() + public async Task GetPostgresStringTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesCntSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresArrayTypesRow + return new GetPostgresStringTypesCntRow { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? null : reader.GetString(4), + Cnt = reader.GetInt64(5) }; } } @@ -1376,21 +1415,20 @@ public async Task GetPostgresArrayTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesSql; + command.CommandText = GetPostgresStringTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresArrayTypesRow + return new GetPostgresStringTypesCntRow { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), - CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), - CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), - CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + CChar = reader.IsDBNull(0) ? null : reader.GetString(0), + CVarchar = reader.IsDBNull(1) ? null : reader.GetString(1), + CCharacterVarying = reader.IsDBNull(2) ? null : reader.GetString(2), + CBpchar = reader.IsDBNull(3) ? null : reader.GetString(3), + CText = reader.IsDBNull(4) ? 
null : reader.GetString(4), + Cnt = reader.GetInt64(5) }; } } @@ -1399,53 +1437,37 @@ public async Task GetPostgresArrayTypes() return null; } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresArrayTypesBatchArgs + private const string GetPostgresStringTypesTextSearchSql = "WITH txt_query AS ( SELECT c_text, to_tsquery('english', @to_tsquery) AS query, to_tsvector('english', c_text) AS tsv FROM postgres_string_types WHERE c_text @@ to_tsquery('english', @to_tsquery) ) SELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk FROM txt_query ORDER BY rnk DESC LIMIT 1"; + public class GetPostgresStringTypesTextSearchRow { - public byte[] CBytea { get; set; } + public string CText { get; set; } + public NpgsqlTsQuery Query { get; set; } + public NpgsqlTsVector Tsv { get; set; } + public float Rnk { get; set; } }; - public async Task InsertPostgresArrayTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CBytea ?? 
(object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea, COUNT(*) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1"; - public class GetPostgresArrayTypesCntRow + public class GetPostgresStringTypesTextSearchArgs { - public byte[] CBytea { get; set; } - public long Cnt { get; set; } + public string ToTsquery { get; set; } }; - public async Task GetPostgresArrayTypesCnt() + public async Task GetPostgresStringTypesTextSearch(GetPostgresStringTypesTextSearchArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) + using (var command = connection.CreateCommand(GetPostgresStringTypesTextSearchSql)) { + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresArrayTypesCntRow + return new GetPostgresStringTypesTextSearchRow { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) + CText = reader.IsDBNull(0) ? 
null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) }; } } @@ -1459,16 +1481,19 @@ public async Task GetPostgresArrayTypesCnt() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresArrayTypesCntSql; + command.CommandText = GetPostgresStringTypesTextSearchSql; command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@to_tsquery", args.ToTsquery); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresArrayTypesCntRow + return new GetPostgresStringTypesTextSearchRow { - CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) + CText = reader.IsDBNull(0) ? null : reader.GetString(0), + Query = reader.GetFieldValue(1), + Tsv = reader.GetFieldValue(2), + Rnk = reader.GetFloat(3) }; } } @@ -1477,58 +1502,28 @@ public async Task GetPostgresArrayTypesCnt() return null; } - private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; - public async Task TruncatePostgresArrayTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncatePostgresArrayTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresGeoTypesSql = " INSERT 
INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; - public class InsertPostgresGeoTypesArgs + private const string InsertPostgresDateTimeTypesSql = " INSERT INTO postgres_datetime_types ( c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval ) VALUES (@c_date, @c_time, @c_timestamp, @c_timestamp_with_tz, @c_interval)"; + public class InsertPostgresDateTimeTypesArgs { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } }; - public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) + public async Task InsertPostgresDateTimeTypes(InsertPostgresDateTimeTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(InsertPostgresDateTimeTypesSql)) { - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -1540,88 +1535,45 @@ public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = InsertPostgresGeoTypesSql; + command.CommandText = InsertPostgresDateTimeTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date", args.CDate ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_time", args.CTime ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp", args.CTimestamp ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_with_tz", args.CTimestampWithTz ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_interval", args.CInterval ?? 
(object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresGeoTypesBatchArgs - { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } - }; - public async Task InsertPostgresGeoTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); - await writer.WriteAsync(row.CLseg ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); - await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); - await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; - public class GetPostgresGeoTypesRow + private const string GetPostgresDateTimeTypesSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1"; + public class GetPostgresDateTimeTypesRow { - public NpgsqlPoint? CPoint { get; set; } - public NpgsqlLine? CLine { get; set; } - public NpgsqlLSeg? CLseg { get; set; } - public NpgsqlBox? 
CBox { get; set; } - public NpgsqlPath? CPath { get; set; } - public NpgsqlPolygon? CPolygon { get; set; } - public NpgsqlCircle? CCircle { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } }; - public async Task GetPostgresGeoTypes() + public async Task GetPostgresDateTimeTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresDateTimeTypesRow { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? 
)null : reader.GetFieldValue(4) }; } } @@ -1635,21 +1587,19 @@ public async Task GetPostgresGeoTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresGeoTypesSql; + command.CommandText = GetPostgresDateTimeTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresGeoTypesRow + return new GetPostgresDateTimeTypesRow { - CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), - CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), - CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), - CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), - CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), - CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), - CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? 
)null : reader.GetFieldValue(4) }; } } @@ -1658,14 +1608,14 @@ public async Task GetPostgresGeoTypes() return null; } - private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; - public async Task TruncatePostgresGeoTypes() + private const string TruncatePostgresDateTimeTypesSql = "TRUNCATE TABLE postgres_datetime_types"; + public async Task TruncatePostgresDateTimeTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresDateTimeTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -1678,41 +1628,42 @@ public async Task TruncatePostgresGeoTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresGeoTypesSql; + command.CommandText = TruncatePostgresDateTimeTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; - public class GetAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorArgs + private const string GetPostgresDateTimeTypesCntSql = "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval, COUNT(*) AS cnt FROM postgres_datetime_types GROUP BY c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval LIMIT 1"; + public class GetPostgresDateTimeTypesCntRow { - public string Name { get; set; } + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? 
CInterval { get; set; } + public long Cnt { get; set; } }; - public async Task GetAuthor(GetAuthorArgs args) + public async Task GetPostgresDateTimeTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresDateTimeTypesCntSql)) { - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow + return new GetPostgresDateTimeTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? )null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) }; } } @@ -1726,110 +1677,128 @@ public async Task GetAuthor(GetAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorSql; + command.CommandText = GetPostgresDateTimeTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorRow - { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) - }; + return new GetPostgresDateTimeTypesCntRow + { + CDate = reader.IsDBNull(0) ? (DateTime? )null : reader.GetDateTime(0), + CTime = reader.IsDBNull(1) ? (TimeSpan? 
)null : reader.GetFieldValue(1), + CTimestamp = reader.IsDBNull(2) ? (DateTime? )null : reader.GetDateTime(2), + CTimestampWithTz = reader.IsDBNull(3) ? (DateTime? )null : reader.GetDateTime(3), + CInterval = reader.IsDBNull(4) ? (TimeSpan? )null : reader.GetFieldValue(4), + Cnt = reader.GetInt64(5) + }; + } + } + } + + return null; + } + + private const string InsertPostgresDateTimeTypesBatchSql = "COPY postgres_datetime_types (c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresDateTimeTypesBatchArgs + { + public DateTime? CDate { get; set; } + public TimeSpan? CTime { get; set; } + public DateTime? CTimestamp { get; set; } + public DateTime? CTimestampWithTz { get; set; } + public TimeSpan? CInterval { get; set; } + }; + public async Task InsertPostgresDateTimeTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresDateTimeTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CDate ?? (object)DBNull.Value, NpgsqlDbType.Date); + await writer.WriteAsync(row.CTime ?? (object)DBNull.Value, NpgsqlDbType.Time); + await writer.WriteAsync(row.CTimestamp ?? (object)DBNull.Value); + await writer.WriteAsync(row.CTimestampWithTz ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInterval ?? 
(object)DBNull.Value, NpgsqlDbType.Interval); } + + await writer.CompleteAsync(); } - } - return null; + await connection.CloseAsync(); + } } - private const string ListAuthorsSql = "SELECT id, name, bio FROM authors ORDER BY name LIMIT @limit OFFSET @offset"; - public class ListAuthorsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class ListAuthorsArgs + private const string InsertPostgresNetworkTypesSql = " INSERT INTO postgres_network_types ( c_cidr, c_inet, c_macaddr, c_macaddr8 ) VALUES ( @c_cidr, @c_inet, @c_macaddr, @c_macaddr8::macaddr8 )"; + public class InsertPostgresNetworkTypesArgs { - public int Offset { get; set; } - public int Limit { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task> ListAuthors(ListAuthorsArgs args) + public async Task InsertPostgresNetworkTypes(InsertPostgresNetworkTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(ListAuthorsSql)) + using (var command = connection.CreateCommand(InsertPostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAuthorsSql; + command.CommandText = InsertPostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@offset", args.Offset); - command.Parameters.AddWithValue("@limit", args.Limit); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAuthorsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_cidr", args.CCidr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_inet", args.CInet ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr", args.CMacaddr ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_macaddr8", args.CMacaddr8 ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string CreateAuthorSql = "INSERT INTO authors (id, name, bio) VALUES (@id, @name, @bio) RETURNING id, name, bio"; - public class CreateAuthorRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class CreateAuthorArgs + private const string GetPostgresNetworkTypesSql = "SELECT c_cidr, c_inet, c_macaddr, c_macaddr8::TEXT AS c_macaddr8 FROM postgres_network_types LIMIT 1"; + public class GetPostgresNetworkTypesRow { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public string CMacaddr8 { get; set; } }; - public async Task CreateAuthor(CreateAuthorArgs args) + public async Task GetPostgresNetworkTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateAuthorSql)) + using (var command = connection.CreateCommand(GetPostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresNetworkTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) }; } } @@ -1843,20 +1812,18 @@ public async Task CreateAuthor(CreateAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorSql; + command.CommandText = GetPostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new CreateAuthorRow + return new GetPostgresNetworkTypesRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CMacaddr8 = reader.IsDBNull(3) ? null : reader.GetString(3) }; } } @@ -1865,74 +1832,58 @@ public async Task CreateAuthor(CreateAuthorArgs args) return null; } - private const string CreateAuthorReturnIdSql = "INSERT INTO authors (name, bio) VALUES (@name, @bio) RETURNING id"; - public class CreateAuthorReturnIdRow - { - public long Id { get; set; } - }; - public class CreateAuthorReturnIdArgs - { - public string Name { get; set; } - public string Bio { get; set; } - }; - public async Task CreateAuthorReturnId(CreateAuthorReturnIdArgs args) + private const string TruncatePostgresNetworkTypesSql = "TRUNCATE TABLE postgres_network_types"; + public async Task TruncatePostgresNetworkTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateAuthorReturnIdSql)) + using (var command = connection.CreateCommand(TruncatePostgresNetworkTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateAuthorReturnIdSql; + command.CommandText = TruncatePostgresNetworkTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt64(result); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorByIdSql = "SELECT id, name, bio FROM authors WHERE id = @id LIMIT 1"; - public class GetAuthorByIdRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByIdArgs + private const string GetPostgresNetworkTypesCntSql = "SELECT c_cidr, c_inet, c_macaddr, COUNT(*) AS cnt FROM postgres_network_types GROUP BY c_cidr, c_inet, c_macaddr LIMIT 1"; + public class GetPostgresNetworkTypesCntRow { - public long Id { get; set; } + public NpgsqlCidr? 
CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } + public long Cnt { get; set; } }; - public async Task GetAuthorById(GetAuthorByIdArgs args) + public async Task GetPostgresNetworkTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorByIdSql)) + using (var command = connection.CreateCommand(GetPostgresNetworkTypesCntSql)) { - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresNetworkTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) }; } } @@ -1946,18 +1897,18 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorByIdSql; + command.CommandText = GetPostgresNetworkTypesCntSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@id", args.Id); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetAuthorByIdRow + return new GetPostgresNetworkTypesCntRow { - Id = reader.GetInt64(0), - Name = reader.GetString(1), - Bio = reader.IsDBNull(2) ? null : reader.GetString(2) + CCidr = reader.IsDBNull(0) ? (NpgsqlCidr? )null : reader.GetFieldValue(0), + CInet = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CMacaddr = reader.IsDBNull(2) ? 
null : reader.GetFieldValue(2), + Cnt = reader.GetInt64(3) }; } } @@ -1966,68 +1917,59 @@ public async Task GetAuthorById(GetAuthorByIdArgs args) return null; } - private const string GetAuthorByNamePatternSql = "SELECT id, name, bio FROM authors WHERE name LIKE COALESCE(@name_pattern, '%')"; - public class GetAuthorByNamePatternRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorByNamePatternArgs + private const string InsertPostgresNetworkTypesBatchSql = "COPY postgres_network_types (c_cidr, c_inet, c_macaddr) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNetworkTypesBatchArgs { - public string NamePattern { get; set; } + public NpgsqlCidr? CCidr { get; set; } + public IPAddress CInet { get; set; } + public PhysicalAddress CMacaddr { get; set; } }; - public async Task> GetAuthorByNamePattern(GetAuthorByNamePatternArgs args) + public async Task InsertPostgresNetworkTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNetworkTypesBatchSql)) { - using (var command = connection.CreateCommand(GetAuthorByNamePatternSql)) + foreach (var row in args) { - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + await writer.StartRowAsync(); + await writer.WriteAsync(row.CCidr ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInet ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CMacaddr ?? (object)DBNull.Value); } - } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetAuthorByNamePatternSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name_pattern", args.NamePattern ?? (object)DBNull.Value); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorByNamePatternRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs + private const string InsertPostgresUnstructuredTypesSql = " INSERT INTO postgres_unstructured_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override ) VALUES ( @c_json::json, @c_json_string_override::json, @c_jsonb::jsonb, @c_jsonpath::jsonpath, @c_xml::xml, @c_xml_string_override::xml )"; + public class InsertPostgresUnstructuredTypesArgs { - public string Name { get; set; } + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? 
CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(DeleteAuthorSql)) + using (var command = connection.CreateCommand(InsertPostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -2039,356 +1981,367 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = InsertPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_json_string_override", args.CJsonStringOverride ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonb", args.CJsonb.HasValue ? args.CJsonb.Value.GetRawText() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_jsonpath", args.CJsonpath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml", args.CXml != null ? args.CXml.OuterXml : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_xml_string_override", args.CXmlStringOverride ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string TruncateAuthorsSql = "TRUNCATE TABLE authors CASCADE"; - public async Task TruncateAuthors() + private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; + public class GetPostgresUnstructuredTypesRow + { + public JsonElement? CJson { get; set; } + public string CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public string CXmlStringOverride { get; set; } + }; + public async Task GetPostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) { - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? 
null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + }; + } + } } } - return; + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateAuthorsSql; + command.CommandText = GetPostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresUnstructuredTypesRow + { + CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), + CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), + CJsonb = reader.IsDBNull(2) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(2)), + CJsonpath = reader.IsDBNull(3) ? null : reader.GetString(3), + CXml = reader.IsDBNull(4) ? null : (new Func((r, o) => + { + var xmlDoc = new XmlDocument(); + xmlDoc.LoadXml(r.GetString(o)); + return xmlDoc; + }))(reader, 4), + CXmlStringOverride = reader.IsDBNull(5) ? 
null : reader.GetString(5) + }; + } + } } + + return null; } - private const string UpdateAuthorsSql = "UPDATE authors SET bio = @bio WHERE bio IS NOT NULL"; - public class UpdateAuthorsArgs - { - public string Bio { get; set; } - }; - public async Task UpdateAuthors(UpdateAuthorsArgs args) + private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; + public async Task TruncatePostgresUnstructuredTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(UpdateAuthorsSql)) + using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) { - command.Parameters.AddWithValue("@bio", args.Bio ?? (object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = UpdateAuthorsSql; + command.CommandText = TruncatePostgresUnstructuredTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio", args.Bio ?? 
(object)DBNull.Value); - return await command.ExecuteNonQueryAsync(); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT [])"; - public class GetAuthorsByIdsRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsArgs + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; + public class InsertPostgresArrayTypesArgs { - public long[] LongArr1 { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> GetAuthorsByIds(GetAuthorsByIdsArgs args) + public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsSql)) + using (var command = connection.CreateCommand(InsertPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsSql; + command.CommandText = InsertPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); - return result; - } + command.Parameters.AddWithValue("@c_bytea", args.CBytea ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_boolean_array", args.CBooleanArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text_array", args.CTextArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer_array", args.CIntegerArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal_array", args.CDecimalArray ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_date_array", args.CDateArray ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_timestamp_array", args.CTimestampArray ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string GetAuthorsByIdsAndNamesSql = "SELECT id, name, bio FROM authors WHERE id = ANY(@longArr_1::BIGINT []) AND name = ANY(@stringArr_2::TEXT [])"; - public class GetAuthorsByIdsAndNamesRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - }; - public class GetAuthorsByIdsAndNamesArgs + private const string GetPostgresArrayTypesSql = "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1"; + public class GetPostgresArrayTypesRow { - public long[] LongArr1 { get; set; } - public string[] StringArr2 { get; set; } + public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CDateArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; - public async Task> GetAuthorsByIdsAndNames(GetAuthorsByIdsAndNamesArgs args) + public async Task GetPostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByIdsAndNamesSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? null : reader.GetFieldValue(6) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByIdsAndNamesSql; + command.CommandText = GetPostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@longArr_1", args.LongArr1); - command.Parameters.AddWithValue("@stringArr_2", args.StringArr2); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); - return result; - } - } - } - - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public Guid Id { get; set; } - }; - public class CreateBookArgs - { - public string Name { get; set; } - public long AuthorId { get; set; } - }; - public async Task CreateBook(CreateBookArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(CreateBookSql)) + if (await reader.ReadAsync()) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); + return new GetPostgresArrayTypesRow + { + CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CDateArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + CTimestampArray = reader.IsDBNull(6) ? 
null : reader.GetFieldValue(6) + }; } } } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = CreateBookSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Guid.Parse(result?.ToString()); - } + + return null; } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresArrayTypesBatchArgs { - public Author Author { get; set; } - public Book Book { get; set; } + public byte[] CBytea { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task InsertPostgresArrayTypesBatch(List args) { - if (this.Transaction == null) + using (var connection = new NpgsqlConnection(ConnectionString)) { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresArrayTypesBatchSql)) { - using (var command = connection.CreateCommand(ListAllAuthorsBooksSql)) + foreach (var row in args) { - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBytea ?? (object)DBNull.Value); } - } - } - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = ListAllAuthorsBooksSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; + await writer.CompleteAsync(); } + + await connection.CloseAsync(); } } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string GetPostgresArrayTypesCntSql = "SELECT c_bytea, COUNT(*) AS cnt FROM postgres_array_types GROUP BY c_bytea LIMIT 1"; + public class GetPostgresArrayTypesCntRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public byte[] CBytea { get; set; } + public long Cnt { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task GetPostgresArrayTypesCnt() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetDuplicateAuthorsSql)) + using (var command = connection.CreateCommand(GetPostgresArrayTypesCntSql)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetPostgresArrayTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt64(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetPostgresArrayTypesCntRow + { + CBytea = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public long Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs - { - public string Name { get; set; } - }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + private const string TruncatePostgresArrayTypesSql = "TRUNCATE TABLE postgres_array_types"; + public async Task TruncatePostgresArrayTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetAuthorsByBookNameSql)) + using (var command = connection.CreateCommand(TruncatePostgresArrayTypesSql)) { - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = TruncatePostgresArrayTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - using (var reader = await command.ExecuteReaderAsync()) - { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt64(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetFieldValue(3), Name = reader.GetString(4), AuthorId = reader.GetInt64(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; - } + await command.ExecuteNonQueryAsync(); } } - private const string CreateExtendedBioSql = "INSERT INTO extended.bios (author_name, name, bio_type) VALUES (@author_name, @name, @bio_type)"; - public class CreateExtendedBioArgs + private const string InsertPostgresGeoTypesSql = " INSERT INTO postgres_geometric_types ( c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle ) VALUES (@c_point, @c_line, @c_lseg, @c_box, @c_path, @c_polygon, @c_circle)"; + public class InsertPostgresGeoTypesArgs { - public string AuthorName { get; set; } - public string Name { get; set; } - public ExtendedBioType? BioType { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? 
CCircle { get; set; } }; - public async Task CreateExtendedBio(CreateExtendedBioArgs args) + public async Task InsertPostgresGeoTypes(InsertPostgresGeoTypesArgs args) { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(CreateExtendedBioSql)) + using (var command = connection.CreateCommand(InsertPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@author_name", args.AuthorName); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } @@ -2400,44 +2353,88 @@ public async Task CreateExtendedBio(CreateExtendedBioArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateExtendedBioSql; + command.CommandText = InsertPostgresGeoTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@author_name", args.AuthorName); - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); + command.Parameters.AddWithValue("@c_point", args.CPoint ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_line", args.CLine ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_lseg", args.CLseg ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_box", args.CBox ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_path", args.CPath ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_polygon", args.CPolygon ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_circle", args.CCircle ?? (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string GetFirstExtendedBioByTypeSql = "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = @bio_type LIMIT 1"; - public class GetFirstExtendedBioByTypeRow + private const string InsertPostgresGeoTypesBatchSql = "COPY postgres_geometric_types (c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresGeoTypesBatchArgs { - public string AuthorName { get; set; } - public string Name { get; set; } - public ExtendedBioType? BioType { get; set; } + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public class GetFirstExtendedBioByTypeArgs + public async Task InsertPostgresGeoTypesBatch(List args) { - public ExtendedBioType? BioType { get; set; } + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresGeoTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CPoint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLine ?? (object)DBNull.Value); + await writer.WriteAsync(row.CLseg ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CBox ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPath ?? (object)DBNull.Value); + await writer.WriteAsync(row.CPolygon ?? (object)DBNull.Value); + await writer.WriteAsync(row.CCircle ?? (object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresGeoTypesSql = "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1"; + public class GetPostgresGeoTypesRow + { + public NpgsqlPoint? CPoint { get; set; } + public NpgsqlLine? CLine { get; set; } + public NpgsqlLSeg? CLseg { get; set; } + public NpgsqlBox? CBox { get; set; } + public NpgsqlPath? CPath { get; set; } + public NpgsqlPolygon? CPolygon { get; set; } + public NpgsqlCircle? CCircle { get; set; } }; - public async Task GetFirstExtendedBioByType(GetFirstExtendedBioByTypeArgs args) + public async Task GetPostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetFirstExtendedBioByTypeSql)) + using (var command = connection.CreateCommand(GetPostgresGeoTypesSql)) { - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetPostgresGeoTypesRow { - AuthorName = reader.GetString(0), - Name = reader.GetString(1), - BioType = reader.IsDBNull(2) ? (ExtendedBioType? )null : reader.GetString(2).ToExtendedBioType() + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? 
)null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? )null : reader.GetFieldValue(6) }; } } @@ -2451,18 +2448,21 @@ public async Task GetFirstExtendedBioByType(GetFir throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetFirstExtendedBioByTypeSql; + command.CommandText = GetPostgresGeoTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@bio_type", args.BioType != null ? args.BioType.Value.Stringify() : (object)DBNull.Value); using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetFirstExtendedBioByTypeRow + return new GetPostgresGeoTypesRow { - AuthorName = reader.GetString(0), - Name = reader.GetString(1), - BioType = reader.IsDBNull(2) ? (ExtendedBioType? )null : reader.GetString(2).ToExtendedBioType() + CPoint = reader.IsDBNull(0) ? (NpgsqlPoint? )null : reader.GetFieldValue(0), + CLine = reader.IsDBNull(1) ? (NpgsqlLine? )null : reader.GetFieldValue(1), + CLseg = reader.IsDBNull(2) ? (NpgsqlLSeg? )null : reader.GetFieldValue(2), + CBox = reader.IsDBNull(3) ? (NpgsqlBox? )null : reader.GetFieldValue(3), + CPath = reader.IsDBNull(4) ? (NpgsqlPath? )null : reader.GetFieldValue(4), + CPolygon = reader.IsDBNull(5) ? (NpgsqlPolygon? )null : reader.GetFieldValue(5), + CCircle = reader.IsDBNull(6) ? (NpgsqlCircle? 
)null : reader.GetFieldValue(6) }; } } @@ -2471,14 +2471,14 @@ public async Task GetFirstExtendedBioByType(GetFir return null; } - private const string TruncateExtendedBiosSql = "TRUNCATE TABLE extended.bios"; - public async Task TruncateExtendedBios() + private const string TruncatePostgresGeoTypesSql = "TRUNCATE TABLE postgres_geometric_types"; + public async Task TruncatePostgresGeoTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncateExtendedBiosSql)) + using (var command = connection.CreateCommand(TruncatePostgresGeoTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -2491,7 +2491,7 @@ public async Task TruncateExtendedBios() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncateExtendedBiosSql; + command.CommandText = TruncatePostgresGeoTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index cea3ae5f..e836ca89 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "postgresql", "schema": [ - "examples/config/postgresql/types/schema.sql", - "examples/config/postgresql/authors/schema.sql" + "examples/config/postgresql/authors/schema.sql", + "examples/config/postgresql/types/schema.sql" ], "queries": [ - "examples/config/postgresql/types/query.sql", - "examples/config/postgresql/authors/query.sql" + "examples/config/postgresql/authors/query.sql", + "examples/config/postgresql/types/query.sql" ], "codegen": { "out": "examples/NpgsqlLegacyExample", @@ -25,6 +25,96 @@ { "name": "public", "tables": [ + { + "rel": { + "name": "authors" + }, + "columns": [ + { + "name": "id", + "notNull": 
true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + } + } + ] + }, + { + "rel": { + "name": "books" + }, + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + }, + { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "schema": "pg_catalog", + "name": "int8" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + } + } + ] + }, { "rel": { "name": "postgres_types" @@ -565,96 +655,6 @@ } } ] - }, - { - "rel": { - "name": "authors" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - } - } - ] - }, - { - "rel": { - "name": "books" - }, - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "text" - } - }, - { - "name": "author_id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "schema": "pg_catalog", - "name": "int8" - } - }, - { - "name": "description", - "length": -1, - 
"table": { - "name": "books" - }, - "type": { - "name": "text" - } - } - ] } ], "enums": [ @@ -32575,1303 +32575,1521 @@ }, "queries": [ { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ + "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "c_boolean", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bool" - }, - "originalName": "c_boolean" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ { - "number": 4, + "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { - "schema": "public", - "name": 
"postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int4" + "name": "text" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_bigint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_real", + "name": "offset", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" + "name": "integer" + } } }, { - "number": 7, + "number": 2, "column": { - "name": "c_numeric", + "name": "limit", + "notNull": true, "length": -1, "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" + "name": "integer" + } } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", + "name": "CreateAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + 
"originalName": "id" }, { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 9, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "id", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "bigserial" }, - "originalName": "c_double_precision" + "originalName": "id" } }, { - "number": 10, + "number": 2, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } }, { - "number": 11, + "number": 3, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" - } - }, - { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "originalName": "bio" } } ], - "comments": [ - " Basic types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "authors" } }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n 
c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.bool" + "name": "text" }, - "originalName": "c_boolean" + "originalName": "name" } }, { "number": 2, "column": { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.int2" + "name": "text" }, - "originalName": "c_smallint" + "originalName": "bio" } - }, - { - "number": 3, - "column": { - "name": "c_integer", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" - } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ { - "number": 4, - "column": { - "name": "c_bigint", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int8" - }, - "originalName": "c_bigint" - } + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 5, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 6, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_numeric", + "name": "id", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.numeric" + "name": "bigserial" }, - "originalName": "c_numeric" + "originalName": "id" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "bigserial" + }, + "originalName": "id" }, { - "number": 7, - "column": { - "name": "c_decimal", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "name" }, { - "number": 8, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "text" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_double_precision", + "name": "name_pattern", "length": -1, + "isNamedParam": true, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "pg_catalog.float8" + "name": "text" }, - "originalName": "c_double_precision" + "originalName": "name" } - }, + } + ], + "filename": 
"query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = $1", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 9, + "number": 1, "column": { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "money" + "name": "text" }, - "originalName": "c_money" + "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE authors CASCADE", + "name": "TruncateAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ { - "number": 10, + "number": 1, "column": { - "name": "c_uuid", + "name": "bio", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "authors" }, "type": { - "name": "uuid" + "name": "text" }, - "originalName": "c_uuid" + "originalName": "bio" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { - "name": "c_boolean", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bool" + "name": "bigserial" }, - "originalName": "c_boolean" + "originalName": "id" }, { - "name": "c_bit", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "bit" + "name": "text" }, - 
"originalName": "c_bit" + "originalName": "name" }, { - "name": "c_smallint", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int2" + "name": "text" }, - "originalName": "c_smallint" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_integer", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int4" - }, - "originalName": "c_integer" - }, + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": "pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", + "name": "GetAuthorsByIdsAndNames", + "cmd": ":many", + "columns": [ { - "name": "c_bigint", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bigserial" }, - "originalName": "c_bigint" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_numeric", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "numeric" + "name": "text" }, - "originalName": "c_numeric" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 1, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "schema": 
"pg_catalog", + "name": "int8" + }, + "arrayDims": 1 + } }, { - "name": "c_double_precision", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "float8" - }, - "originalName": "c_double_precision" - }, + "number": 2, + "column": { + "notNull": true, + "isArray": true, + "length": -1, + "type": { + "name": "text" + }, + "arrayDims": 1 + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", + "name": "CreateBook", + "cmd": ":execlastid", + "columns": [ { - "name": "c_money", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "books" }, "type": { - "name": "money" + "name": "uuid" }, - "originalName": "c_money" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } }, { - "name": "c_uuid", + "number": 2, + "column": { + "name": "author_id", + "notNull": true, + "length": -1, + "table": { + "schema": "public", + "name": "books" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "author_id" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "books" + } + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", + "columns": [ + { + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_enum", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - 
}, - "originalName": "c_enum" + "type": {}, + "embedTable": { + "name": "books" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "c_smallint", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "int2" - }, - "originalName": "c_smallint" + "type": {}, + "embedTable": { + "name": "authors" + } }, { - "name": "c_boolean", + "name": "authors", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, + "type": {}, + "embedTable": { + "name": "authors" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", + "name": "GetAuthorsByBookName", + "cmd": ":many", + "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int4" + "name": "bigserial" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_bigint", + "name": "name", + "notNull": true, "length": -1, "table": { - 
"name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "text" }, - "originalName": "c_bigint" + "originalName": "name" }, { - "name": "c_real", + "name": "bio", "length": -1, "table": { - "name": "postgres_types" + "name": "authors" }, "type": { - "schema": "pg_catalog", - "name": "float4" + "name": "text" }, - "originalName": "c_real" + "originalName": "bio" }, { - "name": "c_numeric", + "name": "books", "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_numeric" + "type": {}, + "embedTable": { + "name": "books" + } + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "books" + }, + "type": { + "name": "text" + }, + "originalName": "name" + } + } + ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", + "name": "CreateExtendedBio", + "cmd": ":exec", + "parameters": [ + { + "number": 1, + "column": { + "name": "author_name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "author_name" + } }, { - "name": "c_decimal", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "numeric" - }, - "originalName": "c_decimal" + "number": 2, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "pg_catalog.varchar" + }, + "originalName": "name" + } }, { - "name": "c_double_precision", + "number": 3, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" + } + } + ], + "filename": "query.sql", + 
"insert_into_table": { + "schema": "extended", + "name": "bios" + } + }, + { + "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", + "name": "GetFirstExtendedBioByType", + "cmd": ":one", + "columns": [ + { + "name": "author_name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { "schema": "pg_catalog", - "name": "float8" + "name": "varchar" }, - "originalName": "c_double_precision" + "originalName": "author_name" }, { - "name": "c_money", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "money" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_money" + "originalName": "name" }, { - "name": "c_uuid", + "name": "bio_type", "length": -1, "table": { - "name": "postgres_types" + "schema": "extended", + "name": "bios" }, "type": { - "name": "uuid" + "schema": "extended", + "name": "bio_type" }, - "originalName": "c_uuid" - }, + "originalName": "bio_type" + } + ], + "parameters": [ { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" + "number": 1, + "column": { + "name": "bio_type", + "length": -1, + "table": { + "schema": "extended", + "name": "bios" + }, + "type": { + "name": "extended.bio_type" + }, + "originalName": "bio_type" } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "TRUNCATE TABLE extended.bios", + "name": "TruncateExtendedBios", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n 
c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "name": "InsertPostgresTypes", + "cmd": ":exec", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_real", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": 
"pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_decimal", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" + } + }, + { + "number": 11, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 12, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "comments": [ + " Basic types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } }, { - "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n 
c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.bool" }, - "originalName": "c_char" + "originalName": "c_boolean" } }, { "number": 2, "column": { - "name": "c_varchar", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int2" }, - "originalName": "c_varchar" + "originalName": "c_smallint" } }, { "number": 3, "column": { - "name": "c_character_varying", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.int4" }, - "originalName": "c_character_varying" + "originalName": "c_integer" } }, { "number": 4, "column": { - "name": "c_bpchar", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "pg_catalog.int8" }, - "originalName": "c_bpchar" + "originalName": "c_bigint" } }, { "number": 5, "column": { - "name": "c_text", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "pg_catalog.float4" }, - "originalName": "c_text" + "originalName": "c_real" } - } - ], - "comments": [ - " String types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_string_types" - } - }, - { - "text": "INSERT INTO postgres_string_types 
\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresStringTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + }, { - "number": 1, + "number": 6, "column": { - "name": "c_char", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.bpchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_char" + "originalName": "c_numeric" } }, { - "number": 2, + "number": 7, "column": { - "name": "c_varchar", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.numeric" }, - "originalName": "c_varchar" + "originalName": "c_decimal" } }, { - "number": 3, + "number": 8, "column": { - "name": "c_character_varying", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "pg_catalog.float8" }, - "originalName": "c_character_varying" + "originalName": "c_double_precision" } }, { - "number": 4, + "number": 9, "column": { - "name": "c_bpchar", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "name": "money" }, - "originalName": "c_bpchar" + "originalName": "c_money" } }, { - "number": 5, + "number": 10, "column": { - "name": "c_text", + "name": "c_uuid", "length": -1, "table": { "schema": "public", - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "uuid" }, - "originalName": "c_text" + "originalName": "c_uuid" } } ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_string_types" + "name": "postgres_types" } }, { - "text": "SELECT c_char, 
c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", - "name": "GetPostgresStringTypes", + "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", "cmd": ":one", "columns": [ { - "name": "c_char", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "bool" }, - "originalName": "c_char" + "originalName": "c_boolean" }, { - "name": "c_varchar", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "bit" }, - "originalName": "c_varchar" + "originalName": "c_bit" }, { - "name": "c_character_varying", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "int2" }, - "originalName": "c_character_varying" + "originalName": "c_smallint" }, { - "name": "c_bpchar", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "int4" }, - "originalName": "c_bpchar" + "originalName": "c_integer" }, { - "name": "c_text", - "length": -1, - "table": { - "name": "postgres_string_types" - }, - "type": { - "name": "text" - }, - "originalName": "c_text" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_string_types", - "name": "TruncatePostgresStringTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n 
c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", - "name": "GetPostgresStringTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_char", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "bpchar" + "name": "int8" }, - "originalName": "c_char" + "originalName": "c_bigint" }, { - "name": "c_varchar", + "name": "c_real", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "float4" }, - "originalName": "c_varchar" + "originalName": "c_real" }, { - "name": "c_character_varying", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "varchar" + "name": "numeric" }, - "originalName": "c_character_varying" + "originalName": "c_numeric" }, { - "name": "c_bpchar", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "bpchar" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_bpchar" + "originalName": "c_decimal" }, { - "name": "c_text", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_string_types" + "name": "postgres_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "float8" }, - "originalName": "c_text" + "originalName": "c_double_precision" }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, - { - "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk 
DESC\nLIMIT 1", - "name": "GetPostgresStringTypesTextSearch", - "cmd": ":one", - "columns": [ - { - "name": "c_text", + "name": "c_money", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "text" + "name": "money" }, - "originalName": "c_text" + "originalName": "c_money" }, { - "name": "query", - "notNull": true, + "name": "c_uuid", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsquery" + "name": "uuid" }, - "originalName": "query" + "originalName": "c_uuid" }, { - "name": "tsv", - "notNull": true, + "name": "c_enum", "length": -1, "table": { - "name": "txt_query" + "name": "postgres_types" }, "type": { - "name": "tsvector" + "name": "c_enum" }, - "originalName": "tsv" - }, - { - "name": "rnk", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "real" - } - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "to_tsquery", - "notNull": true, - "length": -1, - "type": { - "name": "text" - } - } + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_date", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "date" - }, - "originalName": "c_date" - } - }, - { - "number": 2, - "column": { - "name": "c_time", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.time" - }, - "originalName": "c_time" - } - }, - { - "number": 3, - "column": { - "name": "c_timestamp", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamp" - }, - 
"originalName": "c_timestamp" - } - }, - { - "number": 4, - "column": { - "name": "c_timestamp_with_tz", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.timestamptz" - }, - "originalName": "c_timestamp_with_tz" - } - }, - { - "number": 5, - "column": { - "name": "c_interval", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_datetime_types" - }, - "type": { - "name": "pg_catalog.interval" - }, - "originalName": "c_interval" - } - } - ], - "comments": [ - " DateTime types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_datetime_types" - } - }, - { - "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", - "name": "GetPostgresDateTimeTypes", + "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_date", + "name": "c_smallint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "int2" }, - "originalName": "c_date" + "originalName": "c_smallint" }, { - "name": "c_time", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "bool" }, - "originalName": "c_time" + "originalName": "c_boolean" }, { - "name": "c_timestamp", + "name": "c_integer", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "int4" }, - 
"originalName": "c_timestamp" + "originalName": "c_integer" }, { - "name": "c_timestamp_with_tz", + "name": "c_bigint", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamptz" + "name": "int8" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bigint" }, { - "name": "c_interval", + "name": "c_real", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "interval" + "name": "float4" }, - "originalName": "c_interval" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_datetime_types", - "name": "TruncatePostgresDateTimeTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", - "name": "GetPostgresDateTimeTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_real" + }, { - "name": "c_date", + "name": "c_numeric", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "name": "date" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "c_date" + "originalName": "c_numeric" }, { - "name": "c_time", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "time" + "name": "numeric" }, - "originalName": "c_time" + "originalName": "c_decimal" }, { - "name": "c_timestamp", + "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { "schema": "pg_catalog", - "name": "timestamp" + "name": "float8" }, - "originalName": "c_timestamp" + "originalName": "c_double_precision" }, { - "name": 
"c_timestamp_with_tz", + "name": "c_money", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "timestamptz" + "name": "money" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_money" }, { - "name": "c_interval", + "name": "c_uuid", "length": -1, "table": { - "name": "postgres_datetime_types" + "name": "postgres_types" }, "type": { - "schema": "pg_catalog", - "name": "interval" + "name": "uuid" }, - "originalName": "c_interval" + "originalName": "c_uuid" }, { "name": "cnt", @@ -33879,262 +34097,362 @@ "length": -1, "isFuncCall": true, "type": { - "name": "bigint" + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ + { + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" + } + }, + { + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", - "name": "InsertPostgresDateTimeTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypes", + "cmd": ":exec", 
"parameters": [ { "number": 1, "column": { - "name": "c_date", + "name": "c_char", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "date" + "name": "pg_catalog.bpchar" }, - "originalName": "c_date" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_time", + "name": "c_varchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.time" + "name": "pg_catalog.varchar" }, - "originalName": "c_time" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_timestamp", + "name": "c_character_varying", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.varchar" }, - "originalName": "c_timestamp" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_timestamp_with_tz", + "name": "c_bpchar", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.timestamptz" + "name": "bpchar" }, - "originalName": "c_timestamp_with_tz" + "originalName": "c_bpchar" } }, { "number": 5, "column": { - "name": "c_interval", + "name": "c_text", "length": -1, "table": { "schema": "public", - "name": "postgres_datetime_types" + "name": "postgres_string_types" }, "type": { - "name": "pg_catalog.interval" + "name": "text" }, - "originalName": "c_interval" + "originalName": "c_text" } } ], + "comments": [ + " String types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_datetime_types" + "name": "postgres_string_types" } }, { - "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", - "name": 
"InsertPostgresNetworkTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_string_types \n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresStringTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_cidr", + "name": "c_char", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "name": "pg_catalog.bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" } }, { "number": 2, "column": { - "name": "c_inet", + "name": "c_varchar", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "name": "pg_catalog.varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" } }, { "number": 3, "column": { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, - "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "name": "pg_catalog.varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" } }, { "number": 4, "column": { - "name": "c_macaddr8", + "name": "c_bpchar", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + } + }, + { + "number": 5, + "column": { + "name": "c_text", "length": -1, + "table": { + "schema": "public", + "name": "postgres_string_types" + }, "type": { - "name": "macaddr8" - } + "name": "text" + }, + "originalName": "c_text" } } ], - "comments": [ - " Network types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_network_types" + "name": "postgres_string_types" } }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n 
c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", - "name": "GetPostgresNetworkTypes", + "text": "SELECT c_char, c_varchar, c_character_varying, c_bpchar, c_text FROM postgres_string_types LIMIT 1", + "name": "GetPostgresStringTypes", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" }, { - "name": "c_macaddr8", - "notNull": true, + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", "length": -1, + "table": { + "name": "postgres_string_types" + }, "type": { "name": "text" - } + }, + "originalName": "c_text" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_network_types", - "name": "TruncatePostgresNetworkTypes", + "text": "TRUNCATE TABLE postgres_string_types", + "name": "TruncatePostgresStringTypes", "cmd": ":exec", "filename": "query.sql" }, { - "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", - "name": "GetPostgresNetworkTypesCnt", + "text": "SELECT\n c_char,\n c_varchar,\n 
c_character_varying,\n c_bpchar,\n c_text,\n COUNT(*) AS cnt\nFROM postgres_string_types\nGROUP BY\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\nLIMIT 1", + "name": "GetPostgresStringTypesCnt", "cmd": ":one", "columns": [ { - "name": "c_cidr", + "name": "c_char", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "cidr" + "schema": "pg_catalog", + "name": "bpchar" }, - "originalName": "c_cidr" + "originalName": "c_char" }, { - "name": "c_inet", + "name": "c_varchar", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "inet" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_inet" + "originalName": "c_varchar" }, { - "name": "c_macaddr", + "name": "c_character_varying", "length": -1, "table": { - "name": "postgres_network_types" + "name": "postgres_string_types" }, "type": { - "name": "macaddr" + "schema": "pg_catalog", + "name": "varchar" }, - "originalName": "c_macaddr" + "originalName": "c_character_varying" + }, + { + "name": "c_bpchar", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "bpchar" + }, + "originalName": "c_bpchar" + }, + { + "name": "c_text", + "length": -1, + "table": { + "name": "postgres_string_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text" }, { "name": "cnt", @@ -34149,488 +34467,295 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", - "name": "InsertPostgresNetworkTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_cidr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "cidr" - }, - "originalName": "c_cidr" - } - }, - { - "number": 2, - "column": { - "name": "c_inet", - "length": -1, - "table": { - "schema": 
"public", - "name": "postgres_network_types" - }, - "type": { - "name": "inet" - }, - "originalName": "c_inet" - } - }, - { - "number": 3, - "column": { - "name": "c_macaddr", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_network_types" - }, - "type": { - "name": "macaddr" - }, - "originalName": "c_macaddr" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_network_types" - } - }, - { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_json", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 2, - "column": { - "name": "c_json_string_override", - "length": -1, - "type": { - "name": "json" - } - } - }, - { - "number": 3, - "column": { - "name": "c_jsonb", - "length": -1, - "type": { - "name": "jsonb" - } - } - }, - { - "number": 4, - "column": { - "name": "c_jsonpath", - "length": -1, - "type": { - "name": "jsonpath" - } - } - }, - { - "number": 5, - "column": { - "name": "c_xml", - "length": -1, - "type": { - "name": "xml" - } - } - }, - { - "number": 6, - "column": { - "name": "c_xml_string_override", - "length": -1, - "type": { - "name": "xml" - } - } - } - ], - "comments": [ - " Unstructured types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_unstructured_types" - } - }, - { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "WITH txt_query AS (\n SELECT \n c_text, \n to_tsquery('english', $1) AS query,\n to_tsvector('english', c_text) AS tsv\n FROM postgres_string_types \n WHERE 
c_text @@ to_tsquery('english', $1)\n)\n\nSELECT txt_query.c_text, txt_query.query, txt_query.tsv, ts_rank(tsv, query) AS rnk\nFROM txt_query\nORDER BY rnk DESC\nLIMIT 1", + "name": "GetPostgresStringTypesTextSearch", "cmd": ":one", "columns": [ { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - }, - "originalName": "c_json" - }, - { - "name": "c_json_string_override", + "name": "c_text", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "json" + "name": "text" }, - "originalName": "c_json_string_override" + "originalName": "c_text" }, { - "name": "c_jsonb", + "name": "query", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonb" + "name": "tsquery" }, - "originalName": "c_jsonb" + "originalName": "query" }, { - "name": "c_jsonpath", + "name": "tsv", + "notNull": true, "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "txt_query" }, "type": { - "name": "jsonpath" + "name": "tsvector" }, - "originalName": "c_jsonpath" + "originalName": "tsv" }, { - "name": "c_xml", + "name": "rnk", + "notNull": true, "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, + "isFuncCall": true, "type": { - "name": "xml" - }, - "originalName": "c_xml" - }, + "name": "real" + } + } + ], + "parameters": [ { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - }, - "originalName": "c_xml_string_override" + "number": 1, + "column": { + "name": "to_tsquery", + "notNull": true, + "length": -1, + "type": { + "name": "text" + } + } } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO 
postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresArrayTypes", + "text": "\nINSERT INTO postgres_datetime_types\n(\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.bool" + "name": "pg_catalog.time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "name": "pg_catalog.timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" } }, { "number": 4, - "column": { - "name": "c_integer_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer_array", - "arrayDims": 1 - } - }, - { - "number": 5, - "column": { - "name": "c_decimal_array", - "isArray": true, - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": 
"c_decimal_array", - "arrayDims": 1 - } - }, - { - "number": 6, - "column": { - "name": "c_date_array", - "isArray": true, + "column": { + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "date" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" } }, { - "number": 7, + "number": 5, "column": { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { "schema": "public", - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "pg_catalog.timestamp" + "name": "pg_catalog.interval" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 + "originalName": "c_interval" } } ], "comments": [ - " Array types " + " DateTime types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" } }, { - "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", - "name": "GetPostgresArrayTypes", + "text": "SELECT c_date, c_time, c_timestamp, c_timestamp_with_tz, c_interval FROM postgres_datetime_types LIMIT 1", + "name": "GetPostgresDateTimeTypes", "cmd": ":one", "columns": [ { - "name": "c_bytea", + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "name": "date" }, - "originalName": "c_bytea" + "originalName": "c_date" }, { - "name": "c_boolean_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "bool" + "name": "time" }, - "originalName": "c_boolean_array", - "arrayDims": 1 + "originalName": "c_time" }, { - "name": 
"c_text_array", - "isArray": true, + "name": "c_timestamp", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "c_text_array", - "arrayDims": 1 + "originalName": "c_timestamp" }, { - "name": "c_integer_array", - "isArray": true, + "name": "c_timestamp_with_tz", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "int4" + "name": "timestamptz" }, - "originalName": "c_integer_array", - "arrayDims": 1 + "originalName": "c_timestamp_with_tz" }, { - "name": "c_decimal_array", - "isArray": true, + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "interval" }, - "originalName": "c_decimal_array", - "arrayDims": 1 - }, + "originalName": "c_interval" + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_datetime_types", + "name": "TruncatePostgresDateTimeTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval,\n COUNT(*) AS cnt\nFROM postgres_datetime_types\nGROUP BY\n c_date,\n c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\nLIMIT 1", + "name": "GetPostgresDateTimeTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_date_array", - "isArray": true, + "name": "c_date", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { "name": "date" }, - "originalName": "c_date_array", - "arrayDims": 1 + "originalName": "c_date" }, { - "name": "c_timestamp_array", - "isArray": true, + "name": "c_time", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + 
"name": "time" + }, + "originalName": "c_time" + }, + { + "name": "c_timestamp", + "length": -1, + "table": { + "name": "postgres_datetime_types" }, "type": { "schema": "pg_catalog", "name": "timestamp" }, - "originalName": "c_timestamp_array", - "arrayDims": 1 - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", - "name": "InsertPostgresArrayTypesBatch", - "cmd": ":copyfrom", - "parameters": [ + "originalName": "c_timestamp" + }, { - "number": 1, - "column": { - "name": "c_bytea", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_array_types" - }, - "type": { - "name": "bytea" - }, - "originalName": "c_bytea" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_array_types" - } - }, - { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", - "name": "GetPostgresArrayTypesCnt", - "cmd": ":one", - "columns": [ + "name": "c_timestamp_with_tz", + "length": -1, + "table": { + "name": "postgres_datetime_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamptz" + }, + "originalName": "c_timestamp_with_tz" + }, { - "name": "c_bytea", + "name": "c_interval", "length": -1, "table": { - "name": "postgres_array_types" + "name": "postgres_datetime_types" }, "type": { - "name": "bytea" + "schema": "pg_catalog", + "name": "interval" }, - "originalName": "c_bytea" + "originalName": "c_interval" }, { "name": "cnt", @@ -34645,1218 +34770,1093 @@ "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_array_types", - "name": "TruncatePostgresArrayTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypes", - "cmd": ":exec", + "text": "INSERT INTO postgres_datetime_types\n(\n c_date,\n 
c_time,\n c_timestamp,\n c_timestamp_with_tz,\n c_interval\n) VALUES ($1, $2, $3, $4, $5)", + "name": "InsertPostgresDateTimeTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_date", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "point" + "name": "date" }, - "originalName": "c_point" + "originalName": "c_date" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_time", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "line" + "name": "pg_catalog.time" }, - "originalName": "c_line" + "originalName": "c_time" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_timestamp", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "lseg" + "name": "pg_catalog.timestamp" }, - "originalName": "c_lseg" + "originalName": "c_timestamp" } }, { "number": 4, "column": { - "name": "c_box", + "name": "c_timestamp_with_tz", "length": -1, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "box" + "name": "pg_catalog.timestamptz" }, - "originalName": "c_box" + "originalName": "c_timestamp_with_tz" } }, { "number": 5, "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_interval", "length": -1, "table": { "schema": "public", - 
"name": "postgres_geometric_types" + "name": "postgres_datetime_types" }, "type": { - "name": "circle" + "name": "pg_catalog.interval" }, - "originalName": "c_circle" + "originalName": "c_interval" } } ], - "comments": [ - " Geometric types " - ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_datetime_types" } }, { - "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", - "name": "InsertPostgresGeoTypesBatch", - "cmd": ":copyfrom", + "text": "\nINSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8\n) VALUES (\n $1, \n $2, \n $3, \n $4::macaddr8\n)", + "name": "InsertPostgresNetworkTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_point", + "name": "c_cidr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "point" + "name": "cidr" }, - "originalName": "c_point" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "c_line", + "name": "c_inet", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "line" + "name": "inet" }, - "originalName": "c_line" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "c_lseg", + "name": "c_macaddr", "length": -1, + "isNamedParam": true, "table": { "schema": "public", - "name": "postgres_geometric_types" + "name": "postgres_network_types" }, "type": { - "name": "lseg" + "name": "macaddr" }, - "originalName": "c_lseg" + "originalName": "c_macaddr" } }, { "number": 4, "column": { - "name": "c_box", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - } 
- }, - { - "number": 5, - "column": { - "name": "c_path", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - } - }, - { - "number": 6, - "column": { - "name": "c_polygon", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - }, - "originalName": "c_polygon" - } - }, - { - "number": 7, - "column": { - "name": "c_circle", + "name": "c_macaddr8", "length": -1, - "table": { - "schema": "public", - "name": "postgres_geometric_types" - }, "type": { - "name": "circle" - }, - "originalName": "c_circle" + "name": "macaddr8" + } } } ], + "comments": [ + " Network types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_geometric_types" + "name": "postgres_network_types" } }, { - "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", - "name": "GetPostgresGeoTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_point", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "point" - }, - "originalName": "c_point" - }, - { - "name": "c_line", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "line" - }, - "originalName": "c_line" - }, - { - "name": "c_lseg", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "lseg" - }, - "originalName": "c_lseg" - }, - { - "name": "c_box", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "box" - }, - "originalName": "c_box" - }, - { - "name": "c_path", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "path" - }, - "originalName": "c_path" - }, - { - "name": "c_polygon", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "polygon" - 
}, - "originalName": "c_polygon" - }, - { - "name": "c_circle", - "length": -1, - "table": { - "name": "postgres_geometric_types" - }, - "type": { - "name": "circle" - }, - "originalName": "c_circle" - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_geometric_types", - "name": "TruncatePostgresGeoTypes", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name = $1 LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio \nFROM authors\nORDER BY name\nLIMIT $2\nOFFSET $1", - "name": "ListAuthors", - "cmd": ":many", + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n c_macaddr8::TEXT AS c_macaddr8\nFROM postgres_network_types\nLIMIT 1", + "name": "GetPostgresNetworkTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": 
"name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } + "originalName": "c_macaddr" }, { - "number": 2, - "column": { - "name": "limit", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } + "name": "c_macaddr8", + "notNull": true, + "length": -1, + "type": { + "name": "text" } } ], "filename": "query.sql" }, { - "text": "INSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio", - "name": "CreateAuthor", + "text": "TRUNCATE TABLE postgres_network_types", + "name": "TruncatePostgresNetworkTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_cidr,\n c_inet,\n c_macaddr,\n COUNT(*) AS cnt\nFROM postgres_network_types\nGROUP BY\n c_cidr,\n c_inet,\n c_macaddr\nLIMIT 1", + "name": "GetPostgresNetworkTypesCnt", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" }, { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" }, { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": 
true, + "type": { + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_network_types\n(\n c_cidr,\n c_inet,\n c_macaddr\n) VALUES ($1, $2, $3)", + "name": "InsertPostgresNetworkTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "id", - "notNull": true, + "name": "c_cidr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "bigserial" + "name": "cidr" }, - "originalName": "id" + "originalName": "c_cidr" } }, { "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_inet", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "inet" }, - "originalName": "name" + "originalName": "c_inet" } }, { "number": 3, "column": { - "name": "bio", + "name": "c_macaddr", "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_network_types" }, "type": { - "name": "text" + "name": "macaddr" }, - "originalName": "bio" + "originalName": "c_macaddr" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_network_types" } }, { - "text": "INSERT INTO authors (name, bio) VALUES ($1, $2) RETURNING id", - "name": "CreateAuthorReturnId", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - ], + "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", + "name": "InsertPostgresUnstructuredTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_json", "length": -1, - "table": { - 
"schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "name" + "name": "json" + } } }, { "number": 2, "column": { - "name": "bio", + "name": "c_json_string_override", "length": -1, - "table": { - "schema": "public", - "name": "authors" - }, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "json" + } + } + }, + { + "number": 3, + "column": { + "name": "c_jsonb", + "length": -1, + "type": { + "name": "jsonb" + } + } + }, + { + "number": 4, + "column": { + "name": "c_jsonpath", + "length": -1, + "type": { + "name": "jsonpath" + } + } + }, + { + "number": 5, + "column": { + "name": "c_xml", + "length": -1, + "type": { + "name": "xml" + } + } + }, + { + "number": 6, + "column": { + "name": "c_xml_string_override", + "length": -1, + "type": { + "name": "xml" + } } } ], + "comments": [ + " Unstructured types " + ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "postgres_unstructured_types" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = $1 LIMIT 1", - "name": "GetAuthorById", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", + "name": "GetPostgresUnstructuredTypes", "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_json", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "json" }, - "originalName": "id" + "originalName": "c_json" }, { - "name": "name", - "notNull": true, + "name": "c_json_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "json" }, - "originalName": "name" + "originalName": "c_json_string_override" }, { - "name": "bio", + "name": "c_jsonb", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": 
"text" + "name": "jsonb" }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE($1, '%')", - "name": "GetAuthorByNamePattern", - "cmd": ":many", - "columns": [ + "originalName": "c_jsonb" + }, { - "name": "id", - "notNull": true, + "name": "c_jsonpath", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "bigserial" + "name": "jsonpath" }, - "originalName": "id" + "originalName": "c_jsonpath" }, { - "name": "name", - "notNull": true, + "name": "c_xml", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "name" + "originalName": "c_xml" }, { - "name": "bio", + "name": "c_xml_string_override", "length": -1, "table": { - "name": "authors" + "name": "postgres_unstructured_types" }, "type": { - "name": "text" + "name": "xml" }, - "originalName": "bio" + "originalName": "c_xml_string_override" } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_unstructured_types", + "name": "TruncatePostgresUnstructuredTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresArrayTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "c_bytea", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "bytea" }, - 
"originalName": "name" + "originalName": "c_bytea" } - } - ], - "filename": "query.sql" - }, - { - "text": "DELETE FROM authors\nWHERE name = $1", - "name": "DeleteAuthor", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_boolean_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "schema": "public", + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "pg_catalog.bool" }, - "originalName": "name" + "originalName": "c_boolean_array", + "arrayDims": 1 } - } - ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE authors CASCADE", - "name": "TruncateAuthors", - "cmd": ":exec", - "filename": "query.sql" - }, - { - "text": "UPDATE authors\nSET bio = $1\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "bio", + "name": "c_text_array", + "isArray": true, "length": -1, "table": { "schema": "public", - "name": "authors" + "name": "postgres_array_types" }, "type": { "name": "text" }, - "originalName": "bio" + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_date_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "date" + }, + "originalName": 
"c_date_array", + "arrayDims": 1 + } + }, + { + "number": 7, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 } } ], - "filename": "query.sql" + "comments": [ + " Array types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ANY($1::BIGINT [])", - "name": "GetAuthorsByIds", - "cmd": ":many", + "text": "SELECT c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array FROM postgres_array_types LIMIT 1", + "name": "GetPostgresArrayTypes", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "bytea" + }, + "originalName": "c_bytea" + }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "schema": "pg_catalog", + "name": "numeric" }, - "originalName": "id" + "originalName": "c_decimal_array", + "arrayDims": 
1 }, { - "name": "name", - "notNull": true, + "name": "c_date_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "name": "date" }, - "originalName": "name" + "originalName": "c_date_array", + "arrayDims": 1 }, { - "name": "bio", + "name": "c_timestamp_array", + "isArray": true, "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "text" + "schema": "pg_catalog", + "name": "timestamp" }, - "originalName": "bio" + "originalName": "c_timestamp_array", + "arrayDims": 1 } ], + "filename": "query.sql" + }, + { + "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "name": "InsertPostgresArrayTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_bytea", "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "bytea" }, - "arrayDims": 1 + "originalName": "c_bytea" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_array_types" + } }, { - "text": "SELECT id, name, bio\nFROM authors\nWHERE id = ANY($1::BIGINT []) AND name = ANY($2::TEXT [])", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", + "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "name": "GetPostgresArrayTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_bytea", "length": -1, "table": { - "name": "authors" + "name": "postgres_array_types" }, "type": { - "name": "bigserial" + "name": "bytea" }, - "originalName": "id" + "originalName": "c_bytea" }, { - "name": "name", + "name": "cnt", "notNull": true, "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, 
- "table": { - "name": "authors" - }, + "isFuncCall": true, "type": { - "name": "text" - }, - "originalName": "bio" + "name": "bigint" + } } ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_array_types", + "name": "TruncatePostgresArrayTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "notNull": true, - "isArray": true, + "name": "c_point", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "schema": "pg_catalog", - "name": "int8" + "name": "point" }, - "arrayDims": 1 + "originalName": "c_point" } }, { "number": 2, "column": { - "notNull": true, - "isArray": true, + "name": "c_line", "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, "type": { - "name": "text" + "name": "line" }, - "arrayDims": 1 + "originalName": "c_line" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, author_id) VALUES ($1, $2) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "uuid" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 3, "column": { - "name": "name", - "notNull": true, + "name": "c_lseg", "length": -1, "table": { "schema": "public", - "name": "books" + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "lseg" }, - "originalName": "name" + "originalName": "c_lseg" } }, { - "number": 2, + "number": 4, "column": { - "name": "author_id", - "notNull": true, + "name": "c_box", "length": -1, "table": { "schema": "public", - "name": "books" + "name": 
"postgres_geometric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "box" }, - "originalName": "author_id" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "books" - } - }, - { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors\nINNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", - "columns": [ - { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "originalName": "c_box" } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 5, + "column": { + "name": "c_path", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", - "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 6, + "column": { + "name": "c_polygon", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" } }, { - "name": "authors", - "length": -1, - "type": {}, - "embedTable": { - "name": "authors" + "number": 7, + "column": { + "name": "c_circle", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } } ], - "filename": "query.sql" + "comments": [ + " Geometric types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": 
"postgres_geometric_types" + } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = $1", - "name": "GetAuthorsByBookName", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "bigserial" - }, - "originalName": "id" - }, + "text": "INSERT INTO postgres_geometric_types (\n c_point, \n c_line, \n c_lseg, \n c_box, \n c_path, \n c_polygon, \n c_circle\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", + "name": "InsertPostgresGeoTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "name" + "number": 1, + "column": { + "name": "c_point", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "point" + }, + "originalName": "c_point" + } }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "text" - }, - "originalName": "bio" + "number": 2, + "column": { + "name": "c_line", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "line" + }, + "originalName": "c_line" + } }, { - "name": "books", - "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "number": 3, + "column": { + "name": "c_lseg", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_geometric_types" + }, + "type": { + "name": "lseg" + }, + "originalName": "c_lseg" } - } - ], - "parameters": [ + }, { - "number": 1, + "number": 4, "column": { - "name": "name", - "notNull": true, + "name": "c_box", "length": -1, "table": { - "name": "books" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "text" + "name": "box" 
}, - "originalName": "name" + "originalName": "c_box" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO extended.bios (author_name, name, bio_type) VALUES ($1, $2, $3)", - "name": "CreateExtendedBio", - "cmd": ":exec", - "parameters": [ + }, { - "number": 1, + "number": 5, "column": { - "name": "author_name", - "notNull": true, + "name": "c_path", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "path" }, - "originalName": "author_name" + "originalName": "c_path" } }, { - "number": 2, + "number": 6, "column": { - "name": "name", - "notNull": true, + "name": "c_polygon", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "pg_catalog.varchar" + "name": "polygon" }, - "originalName": "name" + "originalName": "c_polygon" } }, { - "number": 3, + "number": 7, "column": { - "name": "bio_type", + "name": "c_circle", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "schema": "public", + "name": "postgres_geometric_types" }, "type": { - "name": "extended.bio_type" + "name": "circle" }, - "originalName": "bio_type" + "originalName": "c_circle" } } ], "filename": "query.sql", "insert_into_table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" } }, { - "text": "SELECT author_name, name, bio_type FROM extended.bios WHERE bio_type = $1 LIMIT 1", - "name": "GetFirstExtendedBioByType", + "text": "SELECT c_point, c_line, c_lseg, c_box, c_path, c_polygon, c_circle FROM postgres_geometric_types LIMIT 1", + "name": "GetPostgresGeoTypes", "cmd": ":one", "columns": [ { - "name": "author_name", - "notNull": true, + "name": "c_point", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + 
"name": "point" }, - "originalName": "author_name" + "originalName": "c_point" }, { - "name": "name", - "notNull": true, + "name": "c_line", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "pg_catalog", - "name": "varchar" + "name": "line" }, - "originalName": "name" + "originalName": "c_line" }, { - "name": "bio_type", + "name": "c_lseg", "length": -1, "table": { - "schema": "extended", - "name": "bios" + "name": "postgres_geometric_types" }, "type": { - "schema": "extended", - "name": "bio_type" + "name": "lseg" }, - "originalName": "bio_type" - } - ], - "parameters": [ + "originalName": "c_lseg" + }, { - "number": 1, - "column": { - "name": "bio_type", - "length": -1, - "table": { - "schema": "extended", - "name": "bios" - }, - "type": { - "name": "extended.bio_type" - }, - "originalName": "bio_type" - } + "name": "c_box", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "box" + }, + "originalName": "c_box" + }, + { + "name": "c_path", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "path" + }, + "originalName": "c_path" + }, + { + "name": "c_polygon", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "polygon" + }, + "originalName": "c_polygon" + }, + { + "name": "c_circle", + "length": -1, + "table": { + "name": "postgres_geometric_types" + }, + "type": { + "name": "circle" + }, + "originalName": "c_circle" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE extended.bios", - "name": "TruncateExtendedBios", + "text": "TRUNCATE TABLE postgres_geometric_types", + "name": "TruncatePostgresGeoTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index cef8e0ba..80368996 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ 
b/examples/NpgsqlLegacyExample/request.message @@ -1,9 +1,19 @@ н 2 -postgresql+examples/config/postgresql/types/schema.sql-examples/config/postgresql/authors/schema.sql"*examples/config/postgresql/types/query.sql",examples/config/postgresql/authors/query.sqlbх +postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner■ы public"└public▄ +./dist/LocalRunner■ы public"└publicГ + authors) +id0         R authorsb  bigserial& +name0         R authorsbtext# +bio0         R authorsbtext╡ +books" +id0         Rbooksbuuid$ +name0         Rbooksbtext5 + author_id0         Rbooksb +pg_catalogint8) + description0         Rbooksbtext▄ postgres_types< c_boolean0         Rpostgres_typesb pg_catalogbool7 @@ -82,17 +92,7 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircleГ - authors) -id0         R authorsb  bigserial& -name0         R authorsbtext# -bio0         R authorsbtext╡ -books" -id0         Rbooksbuuid$ -name0         Rbooksbtext5 - author_id0         
Rbooksb -pg_catalogint8) - description0         Rbooksbtext" +c_circle0         Rpostgres_geometric_typesbcircle" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10221,7 +10221,110 @@ pg_catalogvarchar< name0         Rextendedbiosb pg_catalogvarchar= bio_type0         Rextendedbiosbextendedbio_type", -bio_type Autobiography BiographyMemoir╧ +bio_type Autobiography BiographyMemoirР +9SELECT id, name, bio FROM authors +WHERE name = $1 LIMIT 1 GetAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*0, +name0         R authorsbtextzname: query.sql║ +CSELECT id, name, bio +FROM authors +ORDER BY name +LIMIT $2 +OFFSET $1 ListAuthors:many"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*&" +offset0         8b integer*%! +limit0         8b integer: query.sqlн +OINSERT INTO authors (id, name, bio) VALUES ($1, $2, $3) RETURNING id, name, bio CreateAuthor:one"- +id0         R authorsb  bigserialzid", +name0         R authorsbtextzname"( +bio0         R authorsbtextzbio*95 +id0         Rpublicauthorsb  bigserialzid*84 +name0         Rpublicauthorsbtextzname*40 +bio0         Rpublicauthorsbtextzbio: query.sqlB authorsЦ +(); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_blob", args.CBlob); - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); - } - - private const string 
InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public class InsertSqliteTypesBatchArgs - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? 
CBlob { get; init; } - }; - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow - { - public int? CInteger { get; init; } - public decimal? CReal { get; init; } - public string? CText { get; init; } - public byte[]? CBlob { get; init; } - public required int Cnt { get; init; } - }; - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow - { - public int? MaxInteger { get; init; } - public required decimal MaxReal { get; init; } - public object? 
MaxText { get; init; } - }; - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); - } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -661,4 +520,145 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } + + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public class InsertSqliteTypesArgs + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? 
CBlob { get; init; } + }; + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_blob", args.CBlob); + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public class InsertSqliteTypesBatchArgs + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + }; + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? 
(object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? CBlob { get; init; } + }; + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow + { + public int? CInteger { get; init; } + public decimal? CReal { get; init; } + public string? CText { get; init; } + public byte[]? 
CBlob { get; init; } + public required int Cnt { get; init; } + }; + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow + { + public int? MaxInteger { get; init; } + public required decimal MaxReal { get; init; } + public object? MaxText { get; init; } + }; + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(DeleteAllSqliteTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != 
System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); + } } \ No newline at end of file diff --git a/examples/SqliteDapperExample/request.json b/examples/SqliteDapperExample/request.json index 58364ca3..26e13959 100644 --- a/examples/SqliteDapperExample/request.json +++ b/examples/SqliteDapperExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/types/schema.sql", - "examples/config/sqlite/authors/schema.sql" + "examples/config/sqlite/authors/schema.sql", + "examples/config/sqlite/types/schema.sql" ], "queries": [ - "examples/config/sqlite/types/query.sql", - "examples/config/sqlite/authors/query.sql" + "examples/config/sqlite/authors/query.sql", + "examples/config/sqlite/types/query.sql" ], "codegen": { "out": "examples/SqliteDapperExample", @@ -27,54 +27,46 @@ "tables": [ { "rel": { - "name": "types_sqlite" + "name": "authors" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - } - }, - { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] }, { "rel": { - "name": "authors" + "name": "books" }, "columns": [ { @@ -82,7 +74,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -93,17 +85,28 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": 
"books" }, "type": { "name": "TEXT" } }, { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" + }, + "type": { + "name": "INTEGER" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" }, "type": { "name": "TEXT" @@ -113,50 +116,47 @@ }, { "rel": { - "name": "books" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" } }, { - "name": "description", + "name": "c_blob", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" } } ] @@ -167,287 +167,467 @@ }, "queries": [ { - "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? 
LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" }, { - "number": 3, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "name": "integer" + } } }, { - "number": 4, + "number": 2, 
"column": { - "name": "c_blob", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "name": "integer" + } } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" } }, { "number": 2, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { "number": 3, "column": { - "name": "c_text", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "authors" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - "name": "GetSqliteTypes", - "cmd": ":one", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + } }, { - "name": "c_real", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, 
c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", "cmd": ":one", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_text", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_blob", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", "columns": [ { - "name": "max_integer", + "name": "id", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + 
"name": "INTEGER" + }, + "originalName": "id" }, { - "name": "max_real", + "name": "name", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "TEXT" + }, + "originalName": "name" }, { - "name": "max_text", + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { "name": "id", @@ -489,24 +669,26 @@ { "number": 1, "column": { - "name": "name", + "name": "ids", "notNull": true, "length": -1, + "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "isSqlcSlice": true, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", "cmd": ":many", "columns": [ { @@ -549,91 +731,66 @@ { "number": 1, "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } - }, - { - "number": 2, - "column": { - "name": "limit", + "name": "ids", "notNull": true, "length": -1, "isNamedParam": true, - "type": { - "name": "integer" - } - } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "INTEGER" }, + "isSqlcSlice": true, "originalName": "id" } }, { "number": 2, "column": { - "name": "name", + "name": "names", "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, + "isSqlcSlice": true, "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 3, + "number": 1, "column": { - "name": "bio", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql" }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", - "name": "CreateAuthorReturnId", + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) 
RETURNING id", + "name": "CreateBook", "cmd": ":execlastid", "columns": [ { @@ -641,7 +798,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -658,7 +815,7 @@ "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -669,159 +826,76 @@ { "number": 2, "column": { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "books" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "books", "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" + "type": {}, + "embedTable": { + "name": "books" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 
LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - }, - { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", "cmd": ":many", "columns": [ { @@ -858,17 +932,25 @@ "name": "TEXT" }, "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } } ], "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "name", + "notNull": true, 
"length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -880,368 +962,286 @@ "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "ids", - "notNull": true, + "name": "c_real", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_real" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "ids", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "names", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, 
author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { - "number": 2, + "number": 3, "column": { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "types_sqlite" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "c_integer", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "c_integer" }, { - "name": "books", + "name": "c_real", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", 
- "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + }, { - "name": "authors", + "name": "c_text", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "TEXT" + }, + "originalName": "c_text" }, { - "name": "authors", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" }, { - "name": "books", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" + }, + { + "name": "cnt", 
+ "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], - "parameters": [ + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "max_integer", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_real", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_text", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/SqliteDapperExample/request.message b/examples/SqliteDapperExample/request.message index 7c62d128b4a74e3b101037dd360382479840549a..199d63ac948437c58ae0c4b532dc6a74e21ec34f 100644 GIT binary patch delta 92 zcmaE4`^Z*+tDlvN(MX7`xG*QPBvndtqJa3s0HKL|noLR>U^d6Z)tnQPG&XMe$hf(P u*^F`WQl_&ky-ZvdlNSo(); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_text", args.CText); - queryParams.Add("c_blob", args.CBlob); - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, 
transaction: this.Transaction); - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public class InsertSqliteTypesBatchArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow - { - public int? CInteger { get; set; } - public decimal? 
CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - public int Cnt { get; set; } - }; - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow - { - public int? 
MaxInteger { get; set; } - public decimal MaxReal { get; set; } - public object MaxText { get; set; } - }; - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - await connection.ExecuteAsync(DeleteAllSqliteTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); - } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -662,5 +521,146 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); await this.Transaction.Connection.ExecuteAsync(DeleteAllAuthorsSql, transaction: this.Transaction); } + + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public class InsertSqliteTypesArgs + { + public int? CInteger { get; set; } + public decimal? 
CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_text", args.CText); + queryParams.Add("c_blob", args.CBlob); + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(InsertSqliteTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(InsertSqliteTypesSql, queryParams, transaction: this.Transaction); + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public class InsertSqliteTypesBatchArgs + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + }; + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? 
(object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesSql, transaction: this.Transaction); + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow + { + public int? CInteger { get; set; } + public decimal? 
CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + public int Cnt { get; set; } + }; + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteTypesCntSql, transaction: this.Transaction); + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow + { + public int? MaxInteger { get; set; } + public decimal MaxReal { get; set; } + public object MaxText { get; set; } + }; + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetSqliteFunctionsSql, transaction: this.Transaction); + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + await connection.ExecuteAsync(DeleteAllSqliteTypesSql); + return; + } + + if (this.Transaction?.Connection == null || 
this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await this.Transaction.Connection.ExecuteAsync(DeleteAllSqliteTypesSql, transaction: this.Transaction); + } } } \ No newline at end of file diff --git a/examples/SqliteDapperLegacyExample/request.json b/examples/SqliteDapperLegacyExample/request.json index 859f4fa1..249c7805 100644 --- a/examples/SqliteDapperLegacyExample/request.json +++ b/examples/SqliteDapperLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/types/schema.sql", - "examples/config/sqlite/authors/schema.sql" + "examples/config/sqlite/authors/schema.sql", + "examples/config/sqlite/types/schema.sql" ], "queries": [ - "examples/config/sqlite/types/query.sql", - "examples/config/sqlite/authors/query.sql" + "examples/config/sqlite/authors/query.sql", + "examples/config/sqlite/types/query.sql" ], "codegen": { "out": "examples/SqliteDapperLegacyExample", @@ -27,54 +27,46 @@ "tables": [ { "rel": { - "name": "types_sqlite" + "name": "authors" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - } - }, - { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] }, { "rel": { - "name": "authors" + "name": "books" }, "columns": [ { @@ -82,7 +74,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -93,17 +85,28 @@ 
"notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" } }, { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" + }, + "type": { + "name": "INTEGER" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" }, "type": { "name": "TEXT" @@ -113,50 +116,47 @@ }, { "rel": { - "name": "books" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" } }, { - "name": "description", + "name": "c_blob", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" } } ] @@ -167,287 +167,467 @@ }, "queries": [ { - "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? 
LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" }, { - "number": 3, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "name": "integer" + } } }, { - "number": 4, + "number": 2, 
"column": { - "name": "c_blob", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "name": "integer" + } } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" } }, { "number": 2, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { "number": 3, "column": { - "name": "c_text", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "authors" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - "name": "GetSqliteTypes", - "cmd": ":one", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + } }, { - "name": "c_real", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, 
c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", "cmd": ":one", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_text", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_blob", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", "columns": [ { - "name": "max_integer", + "name": "id", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + 
"name": "INTEGER" + }, + "originalName": "id" }, { - "name": "max_real", + "name": "name", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "TEXT" + }, + "originalName": "name" }, { - "name": "max_text", + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { "name": "id", @@ -489,24 +669,26 @@ { "number": 1, "column": { - "name": "name", + "name": "ids", "notNull": true, "length": -1, + "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "isSqlcSlice": true, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", "cmd": ":many", "columns": [ { @@ -549,91 +731,66 @@ { "number": 1, "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } - }, - { - "number": 2, - "column": { - "name": "limit", + "name": "ids", "notNull": true, "length": -1, "isNamedParam": true, - "type": { - "name": "integer" - } - } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "INTEGER" }, + "isSqlcSlice": true, "originalName": "id" } }, { "number": 2, "column": { - "name": "name", + "name": "names", "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, + "isSqlcSlice": true, "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 3, + "number": 1, "column": { - "name": "bio", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql" }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", - "name": "CreateAuthorReturnId", + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) 
RETURNING id", + "name": "CreateBook", "cmd": ":execlastid", "columns": [ { @@ -641,7 +798,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -658,7 +815,7 @@ "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -669,159 +826,76 @@ { "number": 2, "column": { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "books" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "books", "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" + "type": {}, + "embedTable": { + "name": "books" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 
LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - }, - { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", "cmd": ":many", "columns": [ { @@ -858,17 +932,25 @@ "name": "TEXT" }, "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } } ], "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "name", + "notNull": true, 
"length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -880,368 +962,286 @@ "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "ids", - "notNull": true, + "name": "c_real", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_real" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "ids", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "names", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, 
author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { - "number": 2, + "number": 3, "column": { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "types_sqlite" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "c_integer", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "c_integer" }, { - "name": "books", + "name": "c_real", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", 
- "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + }, { - "name": "authors", + "name": "c_text", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "TEXT" + }, + "originalName": "c_text" }, { - "name": "authors", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" }, { - "name": "books", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" + }, + { + "name": "cnt", 
+ "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], - "parameters": [ + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "max_integer", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_real", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_text", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/SqliteDapperLegacyExample/request.message b/examples/SqliteDapperLegacyExample/request.message index a304f552c55c5f6cdf914e0bdd1e63bb5afd4d4e..1cbcf38dfa53ef46e3f408eef07d293c7b694349 100644 GIT binary patch delta 83 zcmZp%Yq1mHTFlDDXe7i|T$qztk}9P+Q9yiRfY3xfO(rD`Fq>oIYR-vC8XLE;GHq^S lHe;N;lj-c{``i|StWtfQ9xXvq_QBjSid+qBQ-Zs4=6E_Pn$_e4Jb2l xDgVSIt&Lk)nI^wyKD&7*lNsaYucAuqtWv#9To#i9c@;N5WDyqJEH3+&5dc>|9h3k7 diff --git a/examples/SqliteExample/Models.cs b/examples/SqliteExample/Models.cs index 54c720b5..e266ada0 100644 --- a/examples/SqliteExample/Models.cs +++ b/examples/SqliteExample/Models.cs @@ -2,6 +2,6 @@ using System.Linq; namespace SqliteExampleGen; -public readonly record struct TypesSqlite(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); public readonly record struct Author(int Id, string Name, string? Bio); -public readonly record struct Book(int Id, string Name, int AuthorId, string? 
Description); \ No newline at end of file +public readonly record struct Book(int Id, string Name, int AuthorId, string? Description); +public readonly record struct TypesSqlite(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); \ No newline at end of file diff --git a/examples/SqliteExample/QuerySql.cs b/examples/SqliteExample/QuerySql.cs index c2b96803..9a9d7ce2 100644 --- a/examples/SqliteExample/QuerySql.cs +++ b/examples/SqliteExample/QuerySql.cs @@ -34,253 +34,6 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction? Transaction { get; } private string? ConnectionString { get; } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; - public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) - { - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertSqliteTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public readonly record struct InsertSqliteTypesBatchArgs(int? CInteger, decimal? CReal, string? CText); - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public readonly record struct GetSqliteTypesRow(int? CInteger, decimal? 
CReal, string? CText, byte[]? CBlob); - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } - } - } - - return null; - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public readonly record struct GetSqliteTypesCntRow(int? CInteger, decimal? CReal, string? CText, byte[]? 
CBlob, int Cnt); - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - - return null; - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? 
MaxText); - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteFunctionsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) - }; - } - } - } - - return null; - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = DeleteAllSqliteTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public readonly record struct GetAuthorRow(int Id, string Name, string? Bio); public readonly record struct GetAuthorArgs(string Name); @@ -928,4 +681,251 @@ public async Task DeleteAllAuthors() await command.ExecuteNonQueryAsync(); } } + + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public readonly record struct InsertSqliteTypesArgs(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) + { + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertSqliteTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public readonly record struct InsertSqliteTypesBatchArgs(int? CInteger, decimal? CReal, string? CText); + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? 
(object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public readonly record struct GetSqliteTypesRow(int? CInteger, decimal? CReal, string? CText, byte[]? CBlob); + public async Task GetSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } + } + } + + return null; + } + + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public readonly record struct GetSqliteTypesCntRow(int? 
CInteger, decimal? CReal, string? CText, byte[]? CBlob, int Cnt); + public async Task GetSqliteTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } + } + } + + return null; + } + + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public readonly record struct GetSqliteFunctionsRow(int? MaxInteger, decimal MaxReal, object? 
MaxText); + public async Task GetSqliteFunctions() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetSqliteFunctionsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) + }; + } + } + } + + return null; + } + + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAllSqliteTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } } \ No newline at end of file diff --git a/examples/SqliteExample/request.json b/examples/SqliteExample/request.json index a150f65e..6c5ae56a 100644 --- a/examples/SqliteExample/request.json +++ b/examples/SqliteExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/types/schema.sql", - "examples/config/sqlite/authors/schema.sql" + "examples/config/sqlite/authors/schema.sql", + "examples/config/sqlite/types/schema.sql" ], "queries": [ - "examples/config/sqlite/types/query.sql", - "examples/config/sqlite/authors/query.sql" + "examples/config/sqlite/authors/query.sql", + "examples/config/sqlite/types/query.sql" ], "codegen": { "out": "examples/SqliteExample", @@ -27,54 +27,46 @@ "tables": [ { "rel": { - "name": "types_sqlite" + "name": "authors" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" } }, { - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": 
"REAL" - } - }, - { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] }, { "rel": { - "name": "authors" + "name": "books" }, "columns": [ { @@ -82,7 +74,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -93,17 +85,28 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" } }, { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" + }, + "type": { + "name": "INTEGER" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" }, "type": { "name": "TEXT" @@ -113,50 +116,47 @@ }, { "rel": { - "name": "books" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" } }, { - "name": "description", + "name": "c_blob", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" } } ] @@ -167,287 +167,467 @@ }, "queries": [ { - "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", - "name": "InsertSqliteTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? 
LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" }, { - "number": 3, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "TEXT" - }, - "originalName": "c_text" + "name": "integer" + } } }, { - "number": 4, + "number": 2, 
"column": { - "name": "c_blob", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "name": "integer" + } } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" } }, { "number": 2, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { "number": 3, "column": { - "name": "c_text", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "authors" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - "name": "GetSqliteTypes", - "cmd": ":one", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + } }, { - "name": "c_real", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, 
c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", "cmd": ":one", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_text", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_blob", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", "columns": [ { - "name": "max_integer", + "name": "id", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + 
"name": "INTEGER" + }, + "originalName": "id" }, { - "name": "max_real", + "name": "name", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "TEXT" + }, + "originalName": "name" }, { - "name": "max_text", + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { "name": "id", @@ -489,24 +669,26 @@ { "number": 1, "column": { - "name": "name", + "name": "ids", "notNull": true, "length": -1, + "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "isSqlcSlice": true, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", "cmd": ":many", "columns": [ { @@ -549,91 +731,66 @@ { "number": 1, "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } - }, - { - "number": 2, - "column": { - "name": "limit", + "name": "ids", "notNull": true, "length": -1, "isNamedParam": true, - "type": { - "name": "integer" - } - } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "INTEGER" }, + "isSqlcSlice": true, "originalName": "id" } }, { "number": 2, "column": { - "name": "name", + "name": "names", "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, + "isSqlcSlice": true, "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 3, + "number": 1, "column": { - "name": "bio", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql" }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", - "name": "CreateAuthorReturnId", + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) 
RETURNING id", + "name": "CreateBook", "cmd": ":execlastid", "columns": [ { @@ -641,7 +798,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -658,7 +815,7 @@ "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -669,159 +826,76 @@ { "number": 2, "column": { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "books" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "books", "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" + "type": {}, + "embedTable": { + "name": "books" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 
LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - }, - { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", "cmd": ":many", "columns": [ { @@ -858,17 +932,25 @@ "name": "TEXT" }, "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } } ], "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "name", + "notNull": true, 
"length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -880,368 +962,286 @@ "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "ids", - "notNull": true, + "name": "c_real", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_real" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "ids", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "names", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, 
author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { - "number": 2, + "number": 3, "column": { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "types_sqlite" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "c_integer", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "c_integer" }, { - "name": "books", + "name": "c_real", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", 
- "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + }, { - "name": "authors", + "name": "c_text", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "TEXT" + }, + "originalName": "c_text" }, { - "name": "authors", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" }, { - "name": "books", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" + }, + { + "name": "cnt", 
+ "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], - "parameters": [ + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "max_integer", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_real", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_text", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/SqliteExample/request.message b/examples/SqliteExample/request.message index c197954552acfebf6c6b697ae8f2e1762b42d2f4..f923db5f22f0bb7b9b9bdc84ba3f3b200b1e0aae 100644 GIT binary patch delta 73 zcmca)d&yRStA&+|(MX7`xG*QPBvndtqJa3s0HKL|noLR>U^d6Z)tnQPG&XK|#<)41 b*^F`WET*%Y4{%!uZl1%U%)a@D%q>O$HZvG5 delta 93 zcmca)d&yRStA&+|(MX7`xG*QPBvnd%qJX$SNo7H5v3_xKMrv-N9#CQ;pEi?{8c=59 wQvQiaS{t`KW1M_}`RwLdOlFKMeau`zle5JoH-8poW#2r9#Zqwd51Ctx0DguaT>t<8 diff --git a/examples/SqliteLegacyExample/Models.cs b/examples/SqliteLegacyExample/Models.cs index c1aa7db5..b27dbdf1 100644 --- a/examples/SqliteLegacyExample/Models.cs +++ b/examples/SqliteLegacyExample/Models.cs @@ -3,13 +3,6 @@ namespace SqliteLegacyExampleGen { using System.Linq; - public class TypesSqlite - { - public int? CInteger { get; set; } - public decimal? 
CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; public class Author { public int Id { get; set; } @@ -23,4 +16,11 @@ public class Book public int AuthorId { get; set; } public string Description { get; set; } }; + public class TypesSqlite + { + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; } \ No newline at end of file diff --git a/examples/SqliteLegacyExample/QuerySql.cs b/examples/SqliteLegacyExample/QuerySql.cs index 13b7c294..2892f905 100644 --- a/examples/SqliteLegacyExample/QuerySql.cs +++ b/examples/SqliteLegacyExample/QuerySql.cs @@ -35,282 +35,6 @@ public static QuerySql WithTransaction(SqliteTransaction transaction) private SqliteTransaction Transaction { get; } private string ConnectionString { get; } - private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; - public class InsertSqliteTypesArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; - public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) - { - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? 
(object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertSqliteTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; - public class InsertSqliteTypesBatchArgs - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - }; - public async Task InsertSqliteTypesBatch(List args) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); - using (var command = new SqliteCommand(transformedSql, connection)) - { - for (int i = 0; i < args.Count; i++) - { - command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? 
(object)DBNull.Value); - } - - await command.ExecuteScalarAsync(); - } - } - } - - private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; - public class GetSqliteTypesRow - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - }; - public async Task GetSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3) - }; - } - } - } - - return null; - } - - private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; - public class GetSqliteTypesCntRow - { - public int? CInteger { get; set; } - public decimal? CReal { get; set; } - public string CText { get; set; } - public byte[] CBlob { get; set; } - public int Cnt { get; set; } - }; - public async Task GetSqliteTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteTypesCntRow - { - CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), - CText = reader.IsDBNull(2) ? null : reader.GetString(2), - CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), - Cnt = reader.GetInt32(4) - }; - } - } - } - - return null; - } - - private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; - public class GetSqliteFunctionsRow - { - public int? MaxInteger { get; set; } - public decimal MaxReal { get; set; } - public object MaxText { get; set; } - }; - public async Task GetSqliteFunctions() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetSqliteFunctionsSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetSqliteFunctionsRow - { - MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), - MaxReal = reader.GetDecimal(1), - MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) - }; - } - } - } - - return null; - } - - private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; - public async Task DeleteAllSqliteTypes() - { - if (this.Transaction == null) - { - using (var connection = new SqliteConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = DeleteAllSqliteTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - private const string GetAuthorSql = "SELECT id, name, bio FROM authors WHERE name = @name LIMIT 1"; public class GetAuthorRow { @@ -797,9 +521,219 @@ public async Task> GetAuthorsByIdsAndNames(GetA command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = transformedSql; + command.Transaction = this.Transaction; + for (int i = 0; i < args.Ids.Length; i++) + command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); + for (int i = 0; i < args.Names.Length; i++) + command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + return result; + } + } + } + + private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; + public class DeleteAuthorArgs + { + public string Name { get; set; } + }; + public async Task DeleteAuthor(DeleteAuthorArgs args) + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(DeleteAuthorSql, connection)) + { + command.Parameters.AddWithValue("@name", args.Name); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = DeleteAuthorSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + await command.ExecuteNonQueryAsync(); + } + } + + 
private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; + public class CreateBookRow + { + public int Id { get; set; } + }; + public class CreateBookArgs + { + public string Name { get; set; } + public int AuthorId { get; set; } + }; + public async Task CreateBook(CreateBookArgs args) + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(CreateBookSql, connection)) + { + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt32(result); + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = CreateBookSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@name", args.Name); + command.Parameters.AddWithValue("@author_id", args.AuthorId); + var result = await command.ExecuteScalarAsync(); + return Convert.ToInt32(result); + } + } + + private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; + public class ListAllAuthorsBooksRow + { + public Author Author { get; set; } + public Book Book { get; set; } + }; + public async Task> ListAllAuthorsBooks() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(ListAllAuthorsBooksSql, connection)) + { + using (var 
reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = ListAllAuthorsBooksSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); + return result; + } + } + } + + private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; + public class GetDuplicateAuthorsRow + { + public Author Author { get; set; } + public Author Author2 { get; set; } + }; + public async Task> GetDuplicateAuthors() + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetDuplicateAuthorsSql, connection)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); + return result; + } + } + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetDuplicateAuthorsSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); + while (await reader.ReadAsync()) + result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? 
null : reader.GetString(5) } }); + return result; + } + } + } + + private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; + public class GetAuthorsByBookNameRow + { + public int Id { get; set; } + public string Name { get; set; } + public string Bio { get; set; } + public Book Book { get; set; } + }; + public class GetAuthorsByBookNameArgs + { + public string Name { get; set; } + }; + public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + { + if (this.Transaction == null) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var command = new SqliteCommand(GetAuthorsByBookNameSql, connection)) + { + command.Parameters.AddWithValue("@name", args.Name); + using (var reader = await command.ExecuteReaderAsync()) + { + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); return result; } } @@ -810,37 +744,29 @@ public async Task> GetAuthorsByIdsAndNames(GetA throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = transformedSql; + command.CommandText = GetAuthorsByBookNameSql; command.Transaction = this.Transaction; - for (int i = 0; i < args.Ids.Length; i++) - command.Parameters.AddWithValue($"@idsArg{i}", args.Ids[i]); - for (int i = 0; i < args.Names.Length; i++) - command.Parameters.AddWithValue($"@namesArg{i}", args.Names[i]); + command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); + var result = new List(); while (await reader.ReadAsync()) - result.Add(new GetAuthorsByIdsAndNamesRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }); + result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? 
null : reader.GetString(6) } }); return result; } } } - private const string DeleteAuthorSql = "DELETE FROM authors WHERE name = @name"; - public class DeleteAuthorArgs - { - public string Name { get; set; } - }; - public async Task DeleteAuthor(DeleteAuthorArgs args) + private const string DeleteAllAuthorsSql = "DELETE FROM authors"; + public async Task DeleteAllAuthors() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAuthorSql, connection)) + using (var command = new SqliteCommand(DeleteAllAuthorsSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } @@ -852,196 +778,270 @@ public async Task DeleteAuthor(DeleteAuthorArgs args) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAuthorSql; + command.CommandText = DeleteAllAuthorsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); await command.ExecuteNonQueryAsync(); } } - private const string CreateBookSql = "INSERT INTO books (name, author_id) VALUES (@name, @author_id) RETURNING id"; - public class CreateBookRow - { - public int Id { get; set; } - }; - public class CreateBookArgs + private const string InsertSqliteTypesSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text, c_blob) VALUES (@c_integer, @c_real, @c_text, @c_blob)"; + public class InsertSqliteTypesArgs { - public string Name { get; set; } - public int AuthorId { get; set; } + public int? CInteger { get; set; } + public decimal? 
CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } }; - public async Task CreateBook(CreateBookArgs args) + public async Task InsertSqliteTypes(InsertSqliteTypesArgs args) { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(CreateBookSql, connection)) + using (var command = new SqliteCommand(InsertSqliteTypesSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt32(result); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } + + return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = CreateBookSql; + command.CommandText = InsertSqliteTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); - command.Parameters.AddWithValue("@author_id", args.AuthorId); - var result = await command.ExecuteScalarAsync(); - return Convert.ToInt32(result); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_text", args.CText ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_blob", args.CBlob ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); } } - private const string ListAllAuthorsBooksSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id ORDER BY authors.name"; - public class ListAllAuthorsBooksRow + private const string InsertSqliteTypesBatchSql = "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (@c_integer, @c_real, @c_text)"; + public class InsertSqliteTypesBatchArgs { - public Author Author { get; set; } - public Book Book { get; set; } + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } }; - public async Task> ListAllAuthorsBooks() + public async Task InsertSqliteTypesBatch(List args) + { + using (var connection = new SqliteConnection(ConnectionString)) + { + await connection.OpenAsync(); + var transformedSql = Utils.TransformQueryForSqliteBatch(InsertSqliteTypesBatchSql, args.Count); + using (var command = new SqliteCommand(transformedSql, connection)) + { + for (int i = 0; i < args.Count; i++) + { + command.Parameters.AddWithValue($"@c_integer{i}", args[i].CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_real{i}", args[i].CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue($"@c_text{i}", args[i].CText ?? (object)DBNull.Value); + } + + await command.ExecuteScalarAsync(); + } + } + } + + private const string GetSqliteTypesSql = "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1"; + public class GetSqliteTypesRow + { + public int? CInteger { get; set; } + public decimal? 
CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + }; + public async Task GetSqliteTypes() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(ListAllAuthorsBooksSql, connection)) + using (var command = new SqliteCommand(GetSqliteTypesSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = ListAllAuthorsBooksSql; + command.CommandText = GetSqliteTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new ListAllAuthorsBooksRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteTypesRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3) + }; + } } } + + return null; } - private const string GetDuplicateAuthorsSql = "SELECT authors1.id, authors1.name, authors1.bio, authors2.id, authors2.name, authors2.bio FROM authors AS authors1 INNER JOIN authors AS authors2 ON authors1.name = authors2.name WHERE authors1.id < authors2.id"; - public class GetDuplicateAuthorsRow + private const string GetSqliteTypesCntSql = "SELECT c_integer, c_real, c_text, c_blob, COUNT(*) AS cnt FROM types_sqlite GROUP BY c_integer, c_real, c_text, c_blob LIMIT 1"; + public class GetSqliteTypesCntRow { - public Author Author { get; set; } - public Author Author2 { get; set; } + public int? CInteger { get; set; } + public decimal? CReal { get; set; } + public string CText { get; set; } + public byte[] CBlob { get; set; } + public int Cnt { get; set; } }; - public async Task> GetDuplicateAuthors() + public async Task GetSqliteTypesCnt() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(GetDuplicateAuthorsSql, connection)) + using (var command = new SqliteCommand(GetSqliteTypesCntSql, connection)) { using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? 
null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetDuplicateAuthorsSql; + command.CommandText = GetSqliteTypesCntSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetDuplicateAuthorsRow { Author = new Author { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2) }, Author2 = new Author { Id = reader.GetInt32(3), Name = reader.GetString(4), Bio = reader.IsDBNull(5) ? null : reader.GetString(5) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteTypesCntRow + { + CInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + CReal = reader.IsDBNull(1) ? (decimal? )null : reader.GetDecimal(1), + CText = reader.IsDBNull(2) ? null : reader.GetString(2), + CBlob = reader.IsDBNull(3) ? 
null : reader.GetFieldValue(3), + Cnt = reader.GetInt32(4) + }; + } } } + + return null; } - private const string GetAuthorsByBookNameSql = "SELECT authors.id, authors.name, authors.bio, books.id, books.name, books.author_id, books.description FROM authors INNER JOIN books ON authors.id = books.author_id WHERE books.name = @name"; - public class GetAuthorsByBookNameRow - { - public int Id { get; set; } - public string Name { get; set; } - public string Bio { get; set; } - public Book Book { get; set; } - }; - public class GetAuthorsByBookNameArgs + private const string GetSqliteFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_real) AS max_real, MAX(c_text) AS max_text FROM types_sqlite"; + public class GetSqliteFunctionsRow { - public string Name { get; set; } + public int? MaxInteger { get; set; } + public decimal MaxReal { get; set; } + public object MaxText { get; set; } }; - public async Task> GetAuthorsByBookName(GetAuthorsByBookNameArgs args) + public async Task GetSqliteFunctions() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(GetAuthorsByBookNameSql, connection)) + using (var command = new SqliteCommand(GetSqliteFunctionsSql, connection)) { - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? 
)null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? null : reader.GetValue(2) + }; + } } } } + + return null; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetAuthorsByBookNameSql; + command.CommandText = GetSqliteFunctionsSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@name", args.Name); using (var reader = await command.ExecuteReaderAsync()) { - var result = new List(); - while (await reader.ReadAsync()) - result.Add(new GetAuthorsByBookNameRow { Id = reader.GetInt32(0), Name = reader.GetString(1), Bio = reader.IsDBNull(2) ? null : reader.GetString(2), Book = new Book { Id = reader.GetInt32(3), Name = reader.GetString(4), AuthorId = reader.GetInt32(5), Description = reader.IsDBNull(6) ? null : reader.GetString(6) } }); - return result; + if (await reader.ReadAsync()) + { + return new GetSqliteFunctionsRow + { + MaxInteger = reader.IsDBNull(0) ? (int? )null : reader.GetInt32(0), + MaxReal = reader.GetDecimal(1), + MaxText = reader.IsDBNull(2) ? 
null : reader.GetValue(2) + }; + } } } + + return null; } - private const string DeleteAllAuthorsSql = "DELETE FROM authors"; - public async Task DeleteAllAuthors() + private const string DeleteAllSqliteTypesSql = "DELETE FROM types_sqlite"; + public async Task DeleteAllSqliteTypes() { if (this.Transaction == null) { using (var connection = new SqliteConnection(ConnectionString)) { await connection.OpenAsync(); - using (var command = new SqliteCommand(DeleteAllAuthorsSql, connection)) + using (var command = new SqliteCommand(DeleteAllSqliteTypesSql, connection)) { await command.ExecuteNonQueryAsync(); } @@ -1054,7 +1054,7 @@ public async Task DeleteAllAuthors() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = DeleteAllAuthorsSql; + command.CommandText = DeleteAllSqliteTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } diff --git a/examples/SqliteLegacyExample/request.json b/examples/SqliteLegacyExample/request.json index 700c30b4..0bdcc6fe 100644 --- a/examples/SqliteLegacyExample/request.json +++ b/examples/SqliteLegacyExample/request.json @@ -3,12 +3,12 @@ "version": "2", "engine": "sqlite", "schema": [ - "examples/config/sqlite/types/schema.sql", - "examples/config/sqlite/authors/schema.sql" + "examples/config/sqlite/authors/schema.sql", + "examples/config/sqlite/types/schema.sql" ], "queries": [ - "examples/config/sqlite/types/query.sql", - "examples/config/sqlite/authors/query.sql" + "examples/config/sqlite/authors/query.sql", + "examples/config/sqlite/types/query.sql" ], "codegen": { "out": "examples/SqliteLegacyExample", @@ -27,54 +27,46 @@ "tables": [ { "rel": { - "name": "types_sqlite" + "name": "authors" }, "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" } }, 
{ - "name": "c_real", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - } - }, - { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" } }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" } } ] }, { "rel": { - "name": "authors" + "name": "books" }, "columns": [ { @@ -82,7 +74,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -93,17 +85,28 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" } }, { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" + }, + "type": { + "name": "INTEGER" + } + }, + { + "name": "description", + "length": -1, + "table": { + "name": "books" }, "type": { "name": "TEXT" @@ -113,50 +116,47 @@ }, { "rel": { - "name": "books" + "name": "types_sqlite" }, "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { "name": "INTEGER" } }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" } }, { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" } }, { - "name": "description", + "name": "c_blob", "length": -1, "table": { - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" } } ] @@ -167,287 +167,467 @@ }, "queries": [ { - "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", - "name": 
"InsertSqliteTypes", - "cmd": ":exec", + "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", + "name": "GetAuthor", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + }, + { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + }, + { + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "c_integer" + "originalName": "name" } + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", + "name": "ListAuthors", + "cmd": ":many", + "columns": [ + { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" }, { - "number": 2, - "column": { - "name": "c_real", - "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, - "type": { - "name": "REAL" - }, - "originalName": "c_real" - } + "name": "name", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" }, { - "number": 3, + "name": "bio", + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, "column": { - "name": "c_text", + "name": "offset", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": 
"TEXT" - }, - "originalName": "c_text" + "name": "integer" + } } }, { - "number": 4, + "number": 2, "column": { - "name": "c_blob", + "name": "limit", + "notNull": true, "length": -1, - "table": { - "schema": "main", - "name": "types_sqlite" - }, + "isNamedParam": true, "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "name": "integer" + } } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "types_sqlite" - } + "filename": "query.sql" }, { - "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", - "name": "InsertSqliteTypesBatch", - "cmd": ":copyfrom", + "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", + "name": "CreateAuthor", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" } }, { "number": 2, "column": { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" } }, { "number": 3, "column": { - "name": "c_text", + "name": "bio", "length": -1, "table": { "schema": "main", - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "bio" } } ], "filename": "query.sql", "insert_into_table": { - "name": "types_sqlite" + "name": "authors" } }, { - "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", - "name": "GetSqliteTypes", - "cmd": ":one", + "text": "INSERT INTO authors (name, bio) VALUES (?, ?) 
RETURNING id", + "name": "CreateAuthorReturnId", + "cmd": ":execlastid", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name", + "notNull": true, + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" + } }, { - "name": "c_real", + "number": 2, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "authors" + } + }, + { + "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", + "name": "GetAuthorById", + "cmd": ":one", + "columns": [ + { + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "INTEGER" }, - "originalName": "c_real" + "originalName": "id" }, { - "name": "c_text", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" + "originalName": "name" }, { - "name": "c_blob", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "BLOB" + "name": "TEXT" }, - "originalName": "c_blob" + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "id", + "notNull": true, + "length": -1, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } } ], "filename": "query.sql" }, { - "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, 
c_blob\nLIMIT 1", - "name": "GetSqliteTypesCnt", + "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 LIMIT ?2", + "name": "GetAuthorByIdWithMultipleNamedParam", "cmd": ":one", "columns": [ { - "name": "c_integer", + "name": "id", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "INTEGER" }, - "originalName": "c_integer" + "originalName": "id" }, { - "name": "c_real", + "name": "name", + "notNull": true, "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { - "name": "REAL" + "name": "TEXT" }, - "originalName": "c_real" + "originalName": "name" }, { - "name": "c_text", + "name": "bio", "length": -1, "table": { - "name": "types_sqlite" + "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "c_text" - }, + "originalName": "bio" + } + ], + "parameters": [ { - "name": "c_blob", - "length": -1, - "table": { - "name": "types_sqlite" - }, - "type": { - "name": "BLOB" - }, - "originalName": "c_blob" + "number": 1, + "column": { + "name": "id_arg", + "notNull": true, + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "id" + } }, { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "integer" + "number": 2, + "column": { + "name": "take", + "length": -1, + "isNamedParam": true, + "type": { + "name": "integer" + } } } ], "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", - "name": "GetSqliteFunctions", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", + "name": "GetAuthorByNamePattern", + "cmd": ":many", "columns": [ { - "name": "max_integer", + "name": "id", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + 
"name": "INTEGER" + }, + "originalName": "id" }, { - "name": "max_real", + "name": "name", + "notNull": true, "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" - } + "name": "TEXT" + }, + "originalName": "name" }, { - "name": "max_text", + "name": "bio", "length": -1, - "isFuncCall": true, + "table": { + "name": "authors" + }, "type": { - "name": "any" + "name": "TEXT" + }, + "originalName": "bio" + } + ], + "parameters": [ + { + "number": 1, + "column": { + "name": "name_pattern", + "length": -1, + "isNamedParam": true, + "table": { + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "name" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM types_sqlite", - "name": "DeleteAllSqliteTypes", - "cmd": ":exec", + "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", + "name": "UpdateAuthors", + "cmd": ":execrows", + "parameters": [ + { + "number": 1, + "column": { + "name": "bio", + "length": -1, + "table": { + "schema": "main", + "name": "authors" + }, + "type": { + "name": "TEXT" + }, + "originalName": "bio" + } + } + ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name = ? LIMIT 1", - "name": "GetAuthor", - "cmd": ":one", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", + "name": "GetAuthorsByIds", + "cmd": ":many", "columns": [ { "name": "id", @@ -489,24 +669,26 @@ { "number": 1, "column": { - "name": "name", + "name": "ids", "notNull": true, "length": -1, + "isNamedParam": true, "table": { "name": "authors" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "isSqlcSlice": true, + "originalName": "id" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio\nFROM authors\nORDER BY name\nLIMIT ?2 OFFSET ?1", - "name": "ListAuthors", + "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", + "name": "GetAuthorsByIdsAndNames", "cmd": ":many", "columns": [ { @@ -549,91 +731,66 @@ { "number": 1, "column": { - "name": "offset", - "notNull": true, - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } - } - }, - { - "number": 2, - "column": { - "name": "limit", + "name": "ids", "notNull": true, "length": -1, "isNamedParam": true, - "type": { - "name": "integer" - } - } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO authors (id, name, bio) VALUES (?, ?, ?)", - "name": "CreateAuthor", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "INTEGER" }, + "isSqlcSlice": true, "originalName": "id" } }, { "number": 2, "column": { - "name": "name", + "name": "names", "notNull": true, "length": -1, + "isNamedParam": true, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, + "isSqlcSlice": true, "originalName": "name" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "DELETE FROM authors\nWHERE name = ?", + "name": "DeleteAuthor", + "cmd": ":exec", + "parameters": [ { - "number": 3, + "number": 1, "column": { - "name": "bio", + "name": "name", + "notNull": true, "length": -1, "table": { - "schema": "main", "name": "authors" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "name" } } ], - "filename": "query.sql", - "insert_into_table": { - "name": "authors" - } + "filename": "query.sql" }, { - "text": "INSERT INTO authors (name, bio) VALUES (?, ?) RETURNING id", - "name": "CreateAuthorReturnId", + "text": "INSERT INTO books (name, author_id) VALUES (?, ?) 
RETURNING id", + "name": "CreateBook", "cmd": ":execlastid", "columns": [ { @@ -641,7 +798,7 @@ "notNull": true, "length": -1, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "INTEGER" @@ -658,7 +815,7 @@ "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -669,159 +826,76 @@ { "number": 2, "column": { - "name": "bio", + "name": "author_id", + "notNull": true, "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "books" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "author_id" } } ], "filename": "query.sql", "insert_into_table": { - "name": "authors" + "name": "books" } }, { - "text": "SELECT id, name, bio FROM authors\nWHERE id = ? LIMIT 1", - "name": "GetAuthorById", - "cmd": ":one", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", + "name": "ListAllAuthorsBooks", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "books", "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" + "type": {}, + "embedTable": { + "name": "books" } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors WHERE id = ?1 AND id = ?1 
LIMIT ?2", - "name": "GetAuthorByIdWithMultipleNamedParam", - "cmd": ":one", + "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", + "name": "GetDuplicateAuthors", + "cmd": ":many", "columns": [ { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + } }, { - "name": "bio", + "name": "authors", "length": -1, - "table": { + "type": {}, + "embedTable": { "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, - "column": { - "name": "id_arg", - "notNull": true, - "length": -1, - "isNamedParam": true, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - }, - { - "number": 2, - "column": { - "name": "take", - "length": -1, - "isNamedParam": true, - "type": { - "name": "integer" - } } } ], "filename": "query.sql" }, { - "text": "SELECT id, name, bio FROM authors\nWHERE name LIKE COALESCE(?1, '%')", - "name": "GetAuthorByNamePattern", + "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", + "name": "GetAuthorsByBookName", "cmd": ":many", "columns": [ { @@ -858,17 +932,25 @@ "name": "TEXT" }, "originalName": "bio" + }, + { + "name": "books", + "length": -1, + "type": {}, + "embedTable": { + "name": "books" + } } ], "parameters": [ { "number": 1, "column": { - "name": "name_pattern", + "name": "name", + "notNull": true, 
"length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "name": "books" }, "type": { "name": "TEXT" @@ -880,368 +962,286 @@ "filename": "query.sql" }, { - "text": "UPDATE authors\nSET bio = ?\nWHERE bio IS NOT NULL", - "name": "UpdateAuthors", - "cmd": ":execrows", + "text": "DELETE FROM authors", + "name": "DeleteAllAuthors", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "INSERT INTO types_sqlite \n(c_integer, c_real, c_text, c_blob) \nVALUES (?, ?, ?, ?)", + "name": "InsertSqliteTypes", + "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "bio", + "name": "c_integer", "length": -1, "table": { "schema": "main", - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "bio" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?)", - "name": "GetAuthorsByIds", - "cmd": ":many", - "columns": [ + }, { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" - }, - { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 2, "column": { - "name": "ids", - "notNull": true, + "name": "c_real", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "REAL" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_real" } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT id, name, bio FROM authors WHERE id IN (/*SLICE:ids*/?) 
AND name IN (/*SLICE:names*/?)", - "name": "GetAuthorsByIdsAndNames", - "cmd": ":many", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - }, - { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" }, { - "name": "bio", - "length": -1, - "table": { - "name": "authors" - }, - "type": { - "name": "TEXT" - }, - "originalName": "bio" - } - ], - "parameters": [ - { - "number": 1, + "number": 3, "column": { - "name": "ids", - "notNull": true, + "name": "c_text", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "isSqlcSlice": true, - "originalName": "id" + "originalName": "c_text" } }, { - "number": 2, + "number": 4, "column": { - "name": "names", - "notNull": true, + "name": "c_blob", "length": -1, - "isNamedParam": true, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "BLOB" }, - "isSqlcSlice": true, - "originalName": "name" + "originalName": "c_blob" } } ], - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "types_sqlite" + } }, { - "text": "DELETE FROM authors\nWHERE name = ?", - "name": "DeleteAuthor", - "cmd": ":exec", + "text": "INSERT INTO types_sqlite (c_integer, c_real, c_text) VALUES (?, ?, ?)", + "name": "InsertSqliteTypesBatch", + "cmd": ":copyfrom", "parameters": [ { "number": 1, "column": { - "name": "name", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "schema": "main", + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "INTEGER" }, - "originalName": "name" + "originalName": "c_integer" } - } - ], - "filename": "query.sql" - }, - { - "text": "INSERT INTO books (name, 
author_id) VALUES (?, ?) RETURNING id", - "name": "CreateBook", - "cmd": ":execlastid", - "columns": [ - { - "name": "id", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "INTEGER" - }, - "originalName": "id" - } - ], - "parameters": [ + }, { - "number": 1, + "number": 2, "column": { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" } }, { - "number": 2, + "number": 3, "column": { - "name": "author_id", - "notNull": true, + "name": "c_text", "length": -1, "table": { "schema": "main", - "name": "books" + "name": "types_sqlite" }, "type": { - "name": "INTEGER" + "name": "TEXT" }, - "originalName": "author_id" + "originalName": "c_text" } } ], "filename": "query.sql", "insert_into_table": { - "name": "books" + "name": "types_sqlite" } }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nORDER BY authors.name", - "name": "ListAllAuthorsBooks", - "cmd": ":many", + "text": "SELECT c_integer, c_real, c_text, c_blob FROM types_sqlite LIMIT 1", + "name": "GetSqliteTypes", + "cmd": ":one", "columns": [ { - "name": "authors", + "name": "c_integer", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "INTEGER" + }, + "originalName": "c_integer" }, { - "name": "books", + "name": "c_real", "length": -1, - "type": {}, - "embedTable": { - "name": "books" - } - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n authors1.id, authors1.name, authors1.bio,\n authors2.id, authors2.name, authors2.bio\nFROM authors AS authors1\nINNER JOIN authors AS authors2 ON authors1.name = authors2.name\nWHERE authors1.id \u003c authors2.id", 
- "name": "GetDuplicateAuthors", - "cmd": ":many", - "columns": [ + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "REAL" + }, + "originalName": "c_real" + }, { - "name": "authors", + "name": "c_text", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "TEXT" + }, + "originalName": "c_text" }, { - "name": "authors", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "authors" - } + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" } ], "filename": "query.sql" }, { - "text": "SELECT\n authors.id, authors.name, authors.bio,\n books.id, books.name, books.author_id, books.description\nFROM authors INNER JOIN books ON authors.id = books.author_id\nWHERE books.name = ?", - "name": "GetAuthorsByBookName", - "cmd": ":many", + "text": "SELECT\n c_integer,\n c_real,\n c_text,\n c_blob,\n COUNT(*) AS cnt\nFROM types_sqlite\nGROUP BY c_integer, c_real, c_text, c_blob\nLIMIT 1", + "name": "GetSqliteTypesCnt", + "cmd": ":one", "columns": [ { - "name": "id", - "notNull": true, + "name": "c_integer", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "INTEGER" }, - "originalName": "id" + "originalName": "c_integer" }, { - "name": "name", - "notNull": true, + "name": "c_real", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { - "name": "TEXT" + "name": "REAL" }, - "originalName": "name" + "originalName": "c_real" }, { - "name": "bio", + "name": "c_text", "length": -1, "table": { - "name": "authors" + "name": "types_sqlite" }, "type": { "name": "TEXT" }, - "originalName": "bio" + "originalName": "c_text" }, { - "name": "books", + "name": "c_blob", "length": -1, - "type": {}, - "embedTable": { - "name": "books" + "table": { + "name": "types_sqlite" + }, + "type": { + "name": "BLOB" + }, + "originalName": "c_blob" + }, + { + "name": "cnt", 
+ "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "integer" } } ], - "parameters": [ + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_real) AS max_real,\n MAX(c_text) AS max_text\nFROM types_sqlite", + "name": "GetSqliteFunctions", + "cmd": ":one", + "columns": [ { - "number": 1, - "column": { - "name": "name", - "notNull": true, - "length": -1, - "table": { - "name": "books" - }, - "type": { - "name": "TEXT" - }, - "originalName": "name" + "name": "max_integer", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_real", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" + } + }, + { + "name": "max_text", + "length": -1, + "isFuncCall": true, + "type": { + "name": "any" } } ], "filename": "query.sql" }, { - "text": "DELETE FROM authors", - "name": "DeleteAllAuthors", + "text": "DELETE FROM types_sqlite", + "name": "DeleteAllSqliteTypes", "cmd": ":exec", "filename": "query.sql" } diff --git a/examples/SqliteLegacyExample/request.message b/examples/SqliteLegacyExample/request.message index 98e282573a50f118c0cec4f89d307ca6fded8f92..c8070b761901cb0246f60ce5aa16705ae114fd09 100644 GIT binary patch delta 86 zcmexj`^8p(YX&P9qmd9>abZqoNvf3QL;>-M0YVe`G?|n%z-*3*t2rkoX>8o`i*a)` ovl-*$bxdbjdYQN^CVLAiPL>wu-^{_oBDi@2i!%FW9@!U+0Crg!#{d8T delta 93 zcmexj`^8p(YX&P9qmd9>abZqoNvf3kL;-PulFEYAV*TRejMUsjJ)p!yK5Zr?HK5GI wrTi0 Date: Fri, 22 Aug 2025 23:17:02 +0200 Subject: [PATCH 29/33] fix: separate numeric data types queries for Postgtres --- .../Templates/PostgresTests.cs | 38 +- end2end/EndToEndTests/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 38 +- end2end/EndToEndTests/NpgsqlTester.cs | 1 + .../EndToEndTests/NpgsqlTester.generated.cs | 38 +- .../EndToEndTestsLegacy/NpgsqlDapperTester.cs | 1 + .../NpgsqlDapperTester.generated.cs | 38 +- end2end/EndToEndTestsLegacy/NpgsqlTester.cs | 1 + .../NpgsqlTester.generated.cs | 38 +- 
examples/NpgsqlDapperExample/Models.cs | 7 +- examples/NpgsqlDapperExample/QuerySql.cs | 225 ++++-- examples/NpgsqlDapperExample/request.json | 720 ++++++++++-------- examples/NpgsqlDapperExample/request.message | 341 ++++----- examples/NpgsqlDapperLegacyExample/Models.cs | 7 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 225 ++++-- .../NpgsqlDapperLegacyExample/request.json | 720 ++++++++++-------- .../NpgsqlDapperLegacyExample/request.message | 341 ++++----- examples/NpgsqlExample/Models.cs | 3 +- examples/NpgsqlExample/QuerySql.cs | 337 ++++++-- examples/NpgsqlExample/request.json | 720 ++++++++++-------- examples/NpgsqlExample/request.message | 341 ++++----- examples/NpgsqlLegacyExample/Models.cs | 7 +- examples/NpgsqlLegacyExample/QuerySql.cs | 416 +++++++--- examples/NpgsqlLegacyExample/request.json | 720 ++++++++++-------- examples/NpgsqlLegacyExample/request.message | 341 ++++----- examples/config/postgresql/types/query.sql | 140 ++-- examples/config/postgresql/types/schema.sql | 13 +- 27 files changed, 3338 insertions(+), 2480 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index e8a0c760..a1d31332 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -63,7 +63,7 @@ public async Task TestPostgresIntegerTypes( int cInteger, long cBigint) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, @@ -71,17 +71,17 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CBigint = cBigint }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresNumericTypesRow { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypes(); + var 
actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); @@ -104,7 +104,7 @@ public async Task TestPostgresFloatingPointTypes( double? cDoublePrecision, decimal? cMoney) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CReal = cReal, CNumeric = cNumeric, @@ -113,7 +113,7 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CMoney = cMoney }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresNumericTypesRow { CReal = cReal, CNumeric = cNumeric, @@ -121,10 +121,10 @@ await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CReal, Is.EqualTo(y.CReal)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -329,7 +329,7 @@ public async Task TestIntegerCopyFrom( long? 
cBigint) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, @@ -337,8 +337,8 @@ public async Task TestIntegerCopyFrom( CBigint = cBigint }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CBoolean = cBoolean, @@ -346,10 +346,10 @@ public async Task TestIntegerCopyFrom( CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -375,7 +375,7 @@ public async Task TestFloatingPointCopyFrom( decimal? 
cMoney) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, @@ -384,8 +384,8 @@ public async Task TestFloatingPointCopyFrom( CMoney = cMoney }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CReal = cReal, @@ -394,10 +394,10 @@ public async Task TestFloatingPointCopyFrom( CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CReal, Is.EqualTo(y.CReal)); @@ -868,7 +868,7 @@ public async Task TestPostgresDataTypesOverride( string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CInteger = cInteger }); diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.cs b/end2end/EndToEndTests/NpgsqlDapperTester.cs index c863d76c..064d1130 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await 
QuerySql.TruncatePostgresGeoTypes(); diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 03e712e8..2465ddb2 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -378,7 +378,7 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CInteger = cInteger }); await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow @@ -441,17 +441,17 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPost [TestCase(null, null, null, null)] public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int cInteger, long cBigint) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, 
QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); @@ -465,8 +465,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric, decimal? cDecimal, double? cDoublePrecision, decimal? cMoney) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CReal = cReal, CNumeric = cNumeric, @@ -474,9 +474,9 @@ public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CReal, Is.EqualTo(y.CReal)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -780,9 +780,9 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP [TestCase(10, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSmallint, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CBoolean = cBoolean, @@ -790,9 +790,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSma CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -807,9 +807,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgre [TestCase(10, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal? cDecimal, decimal? cNumeric, double? cDoublePrecision, decimal? 
cMoney) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CReal = cReal, @@ -818,9 +818,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CReal, Is.EqualTo(y.CReal)); diff --git a/end2end/EndToEndTests/NpgsqlTester.cs b/end2end/EndToEndTests/NpgsqlTester.cs index 522d85b7..0ecdc5bd 100644 --- a/end2end/EndToEndTests/NpgsqlTester.cs +++ b/end2end/EndToEndTests/NpgsqlTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTables() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 
110e0c7f..e6267518 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -378,7 +378,7 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CInteger = cInteger }); await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow @@ -441,17 +441,17 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPost [TestCase(null, null, null, null)] public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int cInteger, long cBigint) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CBoolean, 
Is.EqualTo(y.CBoolean)); Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); @@ -465,8 +465,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric, decimal? cDecimal, double? cDoublePrecision, decimal? cMoney) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CReal = cReal, CNumeric = cNumeric, @@ -474,9 +474,9 @@ public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CReal, Is.EqualTo(y.CReal)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -780,9 +780,9 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP [TestCase(10, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSmallint, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CBoolean = cBoolean, @@ -790,9 +790,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSma CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -807,9 +807,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgre [TestCase(10, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal? cDecimal, decimal? cNumeric, double? cDoublePrecision, decimal? 
cMoney) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CReal = cReal, @@ -818,9 +818,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CReal, Is.EqualTo(y.CReal)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs index a5dbdc0a..ba19090d 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs 
b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 37035592..4904081a 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -378,7 +378,7 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CInteger = cInteger }); await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow @@ -441,17 +441,17 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPost [TestCase(null, null, null, null)] public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int cInteger, long cBigint) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void 
AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); @@ -465,8 +465,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric, decimal? cDecimal, double? cDoublePrecision, decimal? cMoney) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CReal = cReal, CNumeric = cNumeric, @@ -474,9 +474,9 @@ public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CReal, Is.EqualTo(y.CReal)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -780,9 +780,9 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP [TestCase(10, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSmallint, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CBoolean = cBoolean, @@ -790,9 +790,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSma CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -807,9 +807,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgre [TestCase(10, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal? cDecimal, decimal? cNumeric, double? cDoublePrecision, decimal? 
cMoney) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CReal = cReal, @@ -818,9 +818,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CReal, Is.EqualTo(y.CReal)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs index 434ad572..bc7d976b 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs @@ -15,6 +15,7 @@ public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); await QuerySql.TruncatePostgresTypes(); + await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs 
b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 2764af55..89c31c17 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -378,7 +378,7 @@ public async Task TestMultipleArrays() [TestCase(null, null, "1970-01-01 00:00:00")] public async Task TestPostgresDataTypesOverride(int? cInteger, string cVarchar, DateTime cTimestamp) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CInteger = cInteger }); + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CInteger = cInteger }); await QuerySql.InsertPostgresDateTimeTypes(new QuerySql.InsertPostgresDateTimeTypesArgs { CTimestamp = cTimestamp }); await QuerySql.InsertPostgresStringTypes(new QuerySql.InsertPostgresStringTypesArgs { CVarchar = cVarchar }); var expected = new QuerySql.GetPostgresFunctionsRow @@ -441,17 +441,17 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesRow x, QuerySql.GetPost [TestCase(null, null, null, null)] public async Task TestPostgresIntegerTypes(bool cBoolean, short cSmallint, int cInteger, long cBigint) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, 
QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); Assert.That(x.CSmallint, Is.EqualTo(y.CSmallint)); @@ -465,8 +465,8 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTy [TestCase(null, null, null, null, null)] public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric, decimal? cDecimal, double? cDoublePrecision, decimal? cMoney) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresNumericTypes(new QuerySql.InsertPostgresNumericTypesArgs { CReal = cReal, CNumeric = cNumeric, CDecimal = cDecimal, CDoublePrecision = cDoublePrecision, CMoney = cMoney }); + var expected = new QuerySql.GetPostgresNumericTypesRow { CReal = cReal, CNumeric = cNumeric, @@ -474,9 +474,9 @@ public async Task TestPostgresFloatingPointTypes(float? cReal, decimal? cNumeric CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresNumericTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesRow x, QuerySql.GetPostgresNumericTypesRow y) { Assert.That(x.CReal, Is.EqualTo(y.CReal)); Assert.That(x.CNumeric, Is.EqualTo(y.CNumeric)); @@ -780,9 +780,9 @@ void AssertSingularEquals(QuerySql.GetPostgresStringTypesCntRow x, QuerySql.GetP [TestCase(10, null, null, null, null)] public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSmallint, int? cInteger, long? 
cBigint) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CBoolean = cBoolean, CSmallint = cSmallint, CInteger = cInteger, CBigint = cBigint }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CBoolean = cBoolean, @@ -790,9 +790,9 @@ public async Task TestIntegerCopyFrom(int batchSize, bool? cBoolean, short? cSma CInteger = cInteger, CBigint = cBigint }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBoolean, Is.EqualTo(y.CBoolean)); @@ -807,9 +807,9 @@ void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgre [TestCase(10, null, null, null, null, null)] public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal? cDecimal, decimal? cNumeric, double? cDoublePrecision, decimal? 
cMoney) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresNumericTypesBatchArgs { CReal = cReal, CDecimal = cDecimal, CNumeric = cNumeric, CDoublePrecision = cDoublePrecision, CMoney = cMoney }).ToList(); + await QuerySql.InsertPostgresNumericTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresNumericTypesCntRow { Cnt = batchSize, CReal = cReal, @@ -818,9 +818,9 @@ public async Task TestFloatingPointCopyFrom(int batchSize, float? cReal, decimal CDoublePrecision = cDoublePrecision, CMoney = cMoney }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresNumericTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresNumericTypesCntRow x, QuerySql.GetPostgresNumericTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CReal, Is.EqualTo(y.CReal)); diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 0cf808d6..44639a8e 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -23,6 +23,11 @@ public class Book public string? Description { get; init; } }; public class PostgresType +{ + public Guid? CUuid { get; init; } + public CEnum? CEnum { get; init; } +}; +public class PostgresNumericType { public bool? CBoolean { get; init; } public byte[]? CBit { get; init; } @@ -34,8 +39,6 @@ public class PostgresType public float? CReal { get; init; } public double? CDoublePrecision { get; init; } public decimal? 
CMoney { get; init; } - public Guid? CUuid { get; init; } - public CEnum? CEnum { get; init; } }; public class PostgresStringType { diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index 7535cc3d..c4f8d91f 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -562,35 +562,15 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } public Guid? CUuid { get; init; } public CEnum? 
CEnum { get; init; } }; public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); if (this.Transaction == null) @@ -605,18 +585,9 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { - public bool? CBoolean { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } public Guid? 
CUuid { get; init; } }; public async Task InsertPostgresTypesBatch(List args) @@ -629,15 +600,6 @@ public async Task InsertPostgresTypesBatch(List ar foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); await writer.WriteAsync(row.CUuid); } @@ -648,19 +610,9 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { - public bool? CBoolean { get; init; } - public byte[]? CBit { get; init; } - public short? CSmallint { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } public Guid? CUuid { get; init; } public CEnum? 
CEnum { get; init; } }; @@ -680,18 +632,9 @@ public class GetPostgresTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { - public short? CSmallint { get; init; } - public bool? CBoolean { get; init; } - public int? CInteger { get; init; } - public long? CBigint { get; init; } - public float? CReal { get; init; } - public decimal? CNumeric { get; init; } - public decimal? CDecimal { get; init; } - public double? CDoublePrecision { get; init; } - public decimal? CMoney { get; init; } public Guid? CUuid { get; init; } public required long Cnt { get; init; } }; @@ -711,7 +654,7 @@ public class GetPostgresTypesCntRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? 
MaxInteger { get; init; } @@ -749,6 +692,164 @@ public async Task TruncatePostgresTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } + private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; + public class InsertPostgresNumericTypesArgs + { + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public decimal? CDecimal { get; init; } + public decimal? CNumeric { get; init; } + public float? CReal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + }; + public async Task InsertPostgresNumericTypes(InsertPostgresNumericTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", args.CMoney); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresNumericTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + 
await this.Transaction.Connection.ExecuteAsync(InsertPostgresNumericTypesSql, queryParams, transaction: this.Transaction); + } + + private const string GetPostgresNumericTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1"; + public class GetPostgresNumericTypesRow + { + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public decimal? CDecimal { get; init; } + public decimal? CNumeric { get; init; } + public float? CReal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + }; + public async Task GetPostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresNumericTypesSql = "TRUNCATE TABLE postgres_numeric_types"; + public async Task TruncatePostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresNumericTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await 
this.Transaction.Connection.ExecuteAsync(TruncatePostgresNumericTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresNumericTypesCntSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money, COUNT(*) AS cnt FROM postgres_numeric_types GROUP BY c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money LIMIT 1"; + public class GetPostgresNumericTypesCntRow + { + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? CInteger { get; init; } + public long? CBigint { get; init; } + public decimal? CDecimal { get; init; } + public decimal? CNumeric { get; init; } + public float? CReal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + public required long Cnt { get; init; } + }; + public async Task GetPostgresNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresNumericTypesBatchSql = "COPY postgres_numeric_types (c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNumericTypesBatchArgs + { + public bool? CBoolean { get; init; } + public byte[]? CBit { get; init; } + public short? CSmallint { get; init; } + public int? 
CInteger { get; init; } + public long? CBigint { get; init; } + public decimal? CDecimal { get; init; } + public decimal? CNumeric { get; init; } + public float? CReal { get; init; } + public double? CDoublePrecision { get; init; } + public decimal? CMoney { get; init; } + }; + public async Task InsertPostgresNumericTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNumericTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CBit); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; public class InsertPostgresStringTypesArgs { diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index 6f839469..ad7ede22 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -121,11 +121,38 @@ }, "columns": [ { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } + } + ] + }, + { + "rel": { + "name": "postgres_numeric_types" + }, + "columns": [ 
+ { + "name": "c_boolean", + "length": -1, + "table": { + "name": "postgres_numeric_types" + }, "type": { "schema": "pg_catalog", "name": "bool" @@ -135,7 +162,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -146,7 +173,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -157,7 +184,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -168,7 +195,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -179,7 +206,7 @@ "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -190,7 +217,7 @@ "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -201,7 +228,7 @@ "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -212,7 +239,7 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -223,31 +250,11 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" } - }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } } ] }, @@ -33459,14 +33466,14 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n 
c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "isNamedParam": true, "table": { @@ -33474,194 +33481,159 @@ "name": "postgres_types" }, "type": { - "name": "pg_catalog.bool" + "name": "uuid" }, - "originalName": "c_boolean" + "originalName": "c_uuid" } }, { "number": 2, "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 4, - "column": { - "name": "c_integer", + "name": "c_enum", "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" + "name": "c_enum" + } } - }, + } + ], + "comments": [ + " Special types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_bigint", + "name": "c_uuid", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - 
"originalName": "c_bigint" + "originalName": "c_uuid" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", + "cmd": ":one", + "columns": [ { - "number": 6, - "column": { - "name": "c_real", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 7, - "column": { - "name": "c_numeric", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + 
"type": { + "name": "bigint" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 11, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n 
c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypes", + "cmd": ":exec", "parameters": [ { "number": 1, @@ -33670,7 +33642,7 @@ "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.bool" @@ -33681,154 +33653,157 @@ { "number": 2, "column": { - "name": "c_smallint", + "name": "c_bit", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int2" + "name": "pg_catalog.bit" }, - "originalName": "c_smallint" + "originalName": "c_bit" } }, { "number": 3, "column": { - "name": "c_integer", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int4" + "name": "pg_catalog.int2" }, - "originalName": "c_integer" + "originalName": "c_smallint" } }, { "number": 4, "column": { - "name": "c_bigint", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "pg_catalog.int4" }, - "originalName": "c_bigint" + "originalName": "c_integer" } }, { "number": 5, "column": { - "name": "c_real", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float4" + "name": "pg_catalog.int8" }, - "originalName": "c_real" + "originalName": "c_bigint" } }, { "number": 6, "column": { - "name": "c_numeric", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_numeric" + "originalName": "c_decimal" } }, { "number": 7, "column": { - "name": 
"c_decimal", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_decimal" + "originalName": "c_numeric" } }, { "number": 8, "column": { - "name": "c_double_precision", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float8" + "name": "pg_catalog.float4" }, - "originalName": "c_double_precision" + "originalName": "c_real" } }, { "number": 9, "column": { - "name": "c_money", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "money" + "name": "pg_catalog.float8" }, - "originalName": "c_money" + "originalName": "c_double_precision" } }, { "number": 10, "column": { - "name": "c_uuid", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "name": "money" }, - "originalName": "c_uuid" + "originalName": "c_money" } } ], + "comments": [ + " Numeric types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "postgres_numeric_types" } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", + "text": "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1", + "name": "GetPostgresNumericTypes", "cmd": ":one", "columns": [ { "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33840,7 +33815,7 @@ "name": 
"c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33852,7 +33827,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33864,7 +33839,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33876,7 +33851,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33885,22 +33860,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33909,22 +33884,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33936,48 +33911,56 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_numeric_types", + "name": "TruncatePostgresNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": 
"SELECT\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money,\n COUNT(*) AS cnt\nFROM postgres_numeric_types\nGROUP BY\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\nLIMIT 1", + "name": "GetPostgresNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_uuid", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "bool" }, - "originalName": "c_uuid" + "originalName": "c_boolean" }, { - "name": "c_enum", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "bit" }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_bit" + }, { "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33985,23 +33968,11 @@ }, "originalName": "c_smallint" }, - { - "name": "c_boolean", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, { "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34013,7 +33984,7 @@ "name": "c_bigint", "length": -1, "table": { - 
"name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34022,22 +33993,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34046,22 +34017,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34073,24 +34044,13 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, { "name": "cnt", "notNull": true, @@ -34104,45 +34064,165 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "INSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", 
+ "name": "InsertPostgresNumericTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_decimal", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + 
"originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_numeric_types" + } }, { "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index eb1415fe..715eae5c 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb▄ 
examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunner■ы public"└publicГ +./dist/LocalRunnerъь public"мpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,30 +13,31 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext▄ -postgres_types< - c_boolean0         Rpostgres_typesb -pg_catalogbool7 -c_bit0         Rpostgres_typesb -pg_catalogbit= - -c_smallint0         Rpostgres_typesb -pg_catalogint2< - c_integer0         Rpostgres_typesb -pg_catalogint4; -c_bigint0         Rpostgres_typesb -pg_catalogint8? - c_decimal0         Rpostgres_typesb -pg_catalognumeric? - c_numeric0         Rpostgres_typesb -pg_catalognumeric; -c_real0         Rpostgres_typesb -pg_catalogfloat4G -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8/ -c_money0         Rpostgres_typesbmoney- + description0         Rbooksbtextr +postgres_types- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumч +c_enum0         Rpostgres_typesbc_enum╘ +postgres_numeric_typesD + c_boolean0         Rpostgres_numeric_typesb +pg_catalogbool? 
+c_bit0         Rpostgres_numeric_typesb +pg_catalogbitE + +c_smallint0         Rpostgres_numeric_typesb +pg_catalogint2D + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4C +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8G + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericG + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericC +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4O +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat87 +c_money0         Rpostgres_numeric_typesbmoneyч postgres_string_typesB c_char0         Rpostgres_string_typesb pg_catalogbpcharF @@ -10324,189 +10325,181 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql╧ - -┴ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ +X INSERT INTO postgres_types ( - c_boolean, - c_bit, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid, c_enum ) VALUES ( $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12::c_enum -)InsertPostgresTypes:exec*TP - c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG -c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR - -c_smallint0         8Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*TP - c_integer0         8Rpublicpostgres_typesbpg_catalog.int4z c_integer*RN -c_bigint0         8Rpublicpostgres_typesbpg_catalog.int8zc_bigint*PL -c_real0         8Rpublicpostgres_typesbpg_catalog.float4zc_real*WS - c_numeric0         8Rpublicpostgres_typesbpg_catalog.numericz c_numeric*WS - c_decimal0         8Rpublicpostgres_typesbpg_catalog.numericz c_decimal*h d -c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F -B -c_money0         
8Rpublicpostgres_typesbmoneyzc_money*C ? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  -c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ -ШINSERT INTO postgres_types + $2::c_enum +)InsertPostgresTypes:exec*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ +;INSERT INTO postgres_types ( - c_boolean, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid ) VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 -)InsertPostgresTypesBatch :copyfrom*RN - c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP - -c_smallint0         Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*RN - c_integer0         Rpublicpostgres_typesbpg_catalog.int4z c_integer*PL -c_bigint0         Rpublicpostgres_typesbpg_catalog.int8zc_bigint*NJ -c_real0         Rpublicpostgres_typesbpg_catalog.float4zc_real*UQ - c_numeric0         Rpublicpostgres_typesbpg_catalog.numericz c_numeric*UQ - c_decimal0         Rpublicpostgres_typesbpg_catalog.numericz c_decimal*fb -c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ -c_money0         Rpublicpostgres_typesbmoneyzc_money*A -= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ -╒SELECT + $1 +)InsertPostgresTypesBatch :copyfrom*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ +:SELECT + c_uuid, + c_enum +FROM postgres_types +LIMIT 1GetPostgresTypes:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ +VSELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_types +GROUP BY + c_uuid +LIMIT 1GetPostgresTypesCnt:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql№ +╥SELECT + MAX(c_integer) AS max_integer, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM 
postgres_datetime_types +CROSS JOIN postgres_numeric_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( + max_integer0         @b +anyarray"( + max_varchar0         @b +anyarray"* + max_timestamp0         @b +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +э +INSERT INTO postgres_numeric_types +( c_boolean, c_bit, c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"> -c_bit0         Rpostgres_typesb -pg_catalogbitzc_bit"I - -c_smallint0         Rpostgres_typesb + c_money +) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypes:exec*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money2 Numeric types : query.sqlBpostgres_numeric_typesь +ЧSELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1GetPostgresNumericTypes:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb 
+pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД -ьSELECT - c_smallint, +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money: query.sqlW +%TRUNCATE TABLE postgres_numeric_typesTruncatePostgresNumericTypes:exec: query.sqlъ +ЄSELECT c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, c_money, - c_uuid, COUNT(*) AS cnt -FROM postgres_types +FROM postgres_numeric_types GROUP BY - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid -LIMIT 1GetPostgresTypesCnt:one"I - -c_smallint0         Rpostgres_typesb + c_money +LIMIT 1GetPostgresNumericTypesCnt:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + 
+c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sqlЇ -╩SELECT - MAX(c_integer) AS max_integer, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM postgres_types -CROSS JOIN postgres_string_types -CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( - max_integer0         @b -anyarray"( - max_varchar0         @b -anyarray"* - max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money" +cnt0         @bbigint: query.sql╫ +ьINSERT INTO postgres_numeric_types +( + c_boolean, + c_bit, + c_smallint, + c_integer, + c_bigint, + c_decimal, + c_numeric, + c_real, + c_double_precision, + c_money +) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypesBatch :copyfrom*ZV + c_boolean0         R 
publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money: query.sqlBpostgres_numeric_typesй П INSERT INTO postgres_string_types ( diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index 90dd3622..a00482ec 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -24,6 +24,11 @@ public class Book public string Description { get; set; } }; public class PostgresType + { + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } + }; + public class PostgresNumericType { public bool? CBoolean { get; set; } public byte[] CBit { get; set; } @@ -35,8 +40,6 @@ public class PostgresType public float? CReal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } - public CEnum? 
CEnum { get; set; } }; public class PostgresStringType { diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 04b6ff3c..65a76398 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -563,35 +563,15 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public CEnum? 
CEnum { get; set; } }; public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { var queryParams = new Dictionary(); - queryParams.Add("c_boolean", args.CBoolean); - queryParams.Add("c_bit", args.CBit); - queryParams.Add("c_smallint", args.CSmallint); - queryParams.Add("c_integer", args.CInteger); - queryParams.Add("c_bigint", args.CBigint); - queryParams.Add("c_real", args.CReal); - queryParams.Add("c_numeric", args.CNumeric); - queryParams.Add("c_decimal", args.CDecimal); - queryParams.Add("c_double_precision", args.CDoublePrecision); - queryParams.Add("c_money", args.CMoney); queryParams.Add("c_uuid", args.CUuid); queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); if (this.Transaction == null) @@ -606,18 +586,9 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? 
CUuid { get; set; } }; public async Task InsertPostgresTypesBatch(List args) @@ -630,15 +601,6 @@ public async Task InsertPostgresTypesBatch(List ar foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean); - await writer.WriteAsync(row.CSmallint); - await writer.WriteAsync(row.CInteger); - await writer.WriteAsync(row.CBigint); - await writer.WriteAsync(row.CReal); - await writer.WriteAsync(row.CNumeric); - await writer.WriteAsync(row.CDecimal); - await writer.WriteAsync(row.CDoublePrecision); - await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); await writer.WriteAsync(row.CUuid); } @@ -649,19 +611,9 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public CEnum? 
CEnum { get; set; } }; @@ -681,18 +633,9 @@ public async Task GetPostgresTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public long Cnt { get; set; } }; @@ -712,7 +655,7 @@ public async Task GetPostgresTypesCnt() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? 
MaxInteger { get; set; } @@ -750,6 +693,164 @@ public async Task TruncatePostgresTypes() await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); } + private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; + public class InsertPostgresNumericTypesArgs + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + }; + public async Task InsertPostgresNumericTypes(InsertPostgresNumericTypesArgs args) + { + var queryParams = new Dictionary(); + queryParams.Add("c_boolean", args.CBoolean); + queryParams.Add("c_bit", args.CBit); + queryParams.Add("c_smallint", args.CSmallint); + queryParams.Add("c_integer", args.CInteger); + queryParams.Add("c_bigint", args.CBigint); + queryParams.Add("c_decimal", args.CDecimal); + queryParams.Add("c_numeric", args.CNumeric); + queryParams.Add("c_real", args.CReal); + queryParams.Add("c_double_precision", args.CDoublePrecision); + queryParams.Add("c_money", args.CMoney); + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(InsertPostgresNumericTypesSql, queryParams); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await 
this.Transaction.Connection.ExecuteAsync(InsertPostgresNumericTypesSql, queryParams, transaction: this.Transaction); + } + + private const string GetPostgresNumericTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1"; + public class GetPostgresNumericTypesRow + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + }; + public async Task GetPostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesSql, transaction: this.Transaction); + } + + private const string TruncatePostgresNumericTypesSql = "TRUNCATE TABLE postgres_numeric_types"; + public async Task TruncatePostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + await connection.ExecuteAsync(TruncatePostgresNumericTypesSql); + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + await 
this.Transaction.Connection.ExecuteAsync(TruncatePostgresNumericTypesSql, transaction: this.Transaction); + } + + private const string GetPostgresNumericTypesCntSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money, COUNT(*) AS cnt FROM postgres_numeric_types GROUP BY c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money LIMIT 1"; + public class GetPostgresNumericTypesCntRow + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresNumericTypesCntSql, transaction: this.Transaction); + } + + private const string InsertPostgresNumericTypesBatchSql = "COPY postgres_numeric_types (c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNumericTypesBatchArgs + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? 
CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + }; + public async Task InsertPostgresNumericTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNumericTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean); + await writer.WriteAsync(row.CBit); + await writer.WriteAsync(row.CSmallint); + await writer.WriteAsync(row.CInteger); + await writer.WriteAsync(row.CBigint); + await writer.WriteAsync(row.CDecimal); + await writer.WriteAsync(row.CNumeric); + await writer.WriteAsync(row.CReal); + await writer.WriteAsync(row.CDoublePrecision); + await writer.WriteAsync(row.CMoney, NpgsqlDbType.Money); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; public class InsertPostgresStringTypesArgs { diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 957cd318..0b53a349 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -121,11 +121,38 @@ }, "columns": [ { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } + } + ] + }, + { + "rel": { + "name": "postgres_numeric_types" + 
}, + "columns": [ + { + "name": "c_boolean", + "length": -1, + "table": { + "name": "postgres_numeric_types" + }, "type": { "schema": "pg_catalog", "name": "bool" @@ -135,7 +162,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -146,7 +173,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -157,7 +184,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -168,7 +195,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -179,7 +206,7 @@ "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -190,7 +217,7 @@ "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -201,7 +228,7 @@ "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -212,7 +239,7 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -223,31 +250,11 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" } - }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } } ] }, @@ -33459,14 +33466,14 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_types\n(\n 
c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "isNamedParam": true, "table": { @@ -33474,194 +33481,159 @@ "name": "postgres_types" }, "type": { - "name": "pg_catalog.bool" + "name": "uuid" }, - "originalName": "c_boolean" + "originalName": "c_uuid" } }, { "number": 2, "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 4, - "column": { - "name": "c_integer", + "name": "c_enum", "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" + "name": "c_enum" + } } - }, + } + ], + "comments": [ + " Special types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_bigint", + "name": "c_uuid", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" 
}, - "originalName": "c_bigint" + "originalName": "c_uuid" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", + "cmd": ":one", + "columns": [ { - "number": 6, - "column": { - "name": "c_real", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 7, - "column": { - "name": "c_numeric", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + 
"type": { + "name": "bigint" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 11, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n 
c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypes", + "cmd": ":exec", "parameters": [ { "number": 1, @@ -33670,7 +33642,7 @@ "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.bool" @@ -33681,154 +33653,157 @@ { "number": 2, "column": { - "name": "c_smallint", + "name": "c_bit", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int2" + "name": "pg_catalog.bit" }, - "originalName": "c_smallint" + "originalName": "c_bit" } }, { "number": 3, "column": { - "name": "c_integer", + "name": "c_smallint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int4" + "name": "pg_catalog.int2" }, - "originalName": "c_integer" + "originalName": "c_smallint" } }, { "number": 4, "column": { - "name": "c_bigint", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "pg_catalog.int4" }, - "originalName": "c_bigint" + "originalName": "c_integer" } }, { "number": 5, "column": { - "name": "c_real", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float4" + "name": "pg_catalog.int8" }, - "originalName": "c_real" + "originalName": "c_bigint" } }, { "number": 6, "column": { - "name": "c_numeric", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_numeric" + "originalName": "c_decimal" } }, { "number": 7, "column": { - "name": 
"c_decimal", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_decimal" + "originalName": "c_numeric" } }, { "number": 8, "column": { - "name": "c_double_precision", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float8" + "name": "pg_catalog.float4" }, - "originalName": "c_double_precision" + "originalName": "c_real" } }, { "number": 9, "column": { - "name": "c_money", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "money" + "name": "pg_catalog.float8" }, - "originalName": "c_money" + "originalName": "c_double_precision" } }, { "number": 10, "column": { - "name": "c_uuid", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "name": "money" }, - "originalName": "c_uuid" + "originalName": "c_money" } } ], + "comments": [ + " Numeric types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "postgres_numeric_types" } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", + "text": "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1", + "name": "GetPostgresNumericTypes", "cmd": ":one", "columns": [ { "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33840,7 +33815,7 @@ "name": 
"c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33852,7 +33827,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33864,7 +33839,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33876,7 +33851,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33885,22 +33860,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33909,22 +33884,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33936,48 +33911,56 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_numeric_types", + "name": "TruncatePostgresNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": 
"SELECT\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money,\n COUNT(*) AS cnt\nFROM postgres_numeric_types\nGROUP BY\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\nLIMIT 1", + "name": "GetPostgresNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_uuid", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "bool" }, - "originalName": "c_uuid" + "originalName": "c_boolean" }, { - "name": "c_enum", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "bit" }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_bit" + }, { "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33985,23 +33968,11 @@ }, "originalName": "c_smallint" }, - { - "name": "c_boolean", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, { "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34013,7 +33984,7 @@ "name": "c_bigint", "length": -1, "table": { - 
"name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34022,22 +33993,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34046,22 +34017,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34073,24 +34044,13 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, { "name": "cnt", "notNull": true, @@ -34104,45 +34064,165 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "INSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", 
+ "name": "InsertPostgresNumericTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_decimal", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + 
"originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_numeric_types" + } }, { "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index b3f18e8c..86c379b9 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbЁ 
"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunner■ы public"└publicГ +./dist/LocalRunnerъь public"мpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,30 +13,31 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext▄ -postgres_types< - c_boolean0         Rpostgres_typesb -pg_catalogbool7 -c_bit0         Rpostgres_typesb -pg_catalogbit= - -c_smallint0         Rpostgres_typesb -pg_catalogint2< - c_integer0         Rpostgres_typesb -pg_catalogint4; -c_bigint0         Rpostgres_typesb -pg_catalogint8? - c_decimal0         Rpostgres_typesb -pg_catalognumeric? - c_numeric0         Rpostgres_typesb -pg_catalognumeric; -c_real0         Rpostgres_typesb -pg_catalogfloat4G -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8/ -c_money0         Rpostgres_typesbmoney- + description0         Rbooksbtextr +postgres_types- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumч +c_enum0         Rpostgres_typesbc_enum╘ +postgres_numeric_typesD + c_boolean0         Rpostgres_numeric_typesb +pg_catalogbool? 
+c_bit0         Rpostgres_numeric_typesb +pg_catalogbitE + +c_smallint0         Rpostgres_numeric_typesb +pg_catalogint2D + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4C +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8G + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericG + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericC +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4O +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat87 +c_money0         Rpostgres_numeric_typesbmoneyч postgres_string_typesB c_char0         Rpostgres_string_typesb pg_catalogbpcharF @@ -10324,189 +10325,181 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql╧ - -┴ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ +X INSERT INTO postgres_types ( - c_boolean, - c_bit, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid, c_enum ) VALUES ( $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12::c_enum -)InsertPostgresTypes:exec*TP - c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG -c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR - -c_smallint0         8Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*TP - c_integer0         8Rpublicpostgres_typesbpg_catalog.int4z c_integer*RN -c_bigint0         8Rpublicpostgres_typesbpg_catalog.int8zc_bigint*PL -c_real0         8Rpublicpostgres_typesbpg_catalog.float4zc_real*WS - c_numeric0         8Rpublicpostgres_typesbpg_catalog.numericz c_numeric*WS - c_decimal0         8Rpublicpostgres_typesbpg_catalog.numericz c_decimal*h d -c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F -B -c_money0         
8Rpublicpostgres_typesbmoneyzc_money*C ? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  -c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ -ШINSERT INTO postgres_types + $2::c_enum +)InsertPostgresTypes:exec*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ +;INSERT INTO postgres_types ( - c_boolean, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid ) VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 -)InsertPostgresTypesBatch :copyfrom*RN - c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP - -c_smallint0         Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*RN - c_integer0         Rpublicpostgres_typesbpg_catalog.int4z c_integer*PL -c_bigint0         Rpublicpostgres_typesbpg_catalog.int8zc_bigint*NJ -c_real0         Rpublicpostgres_typesbpg_catalog.float4zc_real*UQ - c_numeric0         Rpublicpostgres_typesbpg_catalog.numericz c_numeric*UQ - c_decimal0         Rpublicpostgres_typesbpg_catalog.numericz c_decimal*fb -c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ -c_money0         Rpublicpostgres_typesbmoneyzc_money*A -= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ -╒SELECT + $1 +)InsertPostgresTypesBatch :copyfrom*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ +:SELECT + c_uuid, + c_enum +FROM postgres_types +LIMIT 1GetPostgresTypes:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ +VSELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_types +GROUP BY + c_uuid +LIMIT 1GetPostgresTypesCnt:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql№ +╥SELECT + MAX(c_integer) AS max_integer, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM 
postgres_datetime_types +CROSS JOIN postgres_numeric_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( + max_integer0         @b +anyarray"( + max_varchar0         @b +anyarray"* + max_timestamp0         @b +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +э +INSERT INTO postgres_numeric_types +( c_boolean, c_bit, c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"> -c_bit0         Rpostgres_typesb -pg_catalogbitzc_bit"I - -c_smallint0         Rpostgres_typesb + c_money +) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypes:exec*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money2 Numeric types : query.sqlBpostgres_numeric_typesь +ЧSELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1GetPostgresNumericTypes:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb 
+pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД -ьSELECT - c_smallint, +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money: query.sqlW +%TRUNCATE TABLE postgres_numeric_typesTruncatePostgresNumericTypes:exec: query.sqlъ +ЄSELECT c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, c_money, - c_uuid, COUNT(*) AS cnt -FROM postgres_types +FROM postgres_numeric_types GROUP BY - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid -LIMIT 1GetPostgresTypesCnt:one"I - -c_smallint0         Rpostgres_typesb + c_money +LIMIT 1GetPostgresNumericTypesCnt:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + 
+c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sqlЇ -╩SELECT - MAX(c_integer) AS max_integer, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM postgres_types -CROSS JOIN postgres_string_types -CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( - max_integer0         @b -anyarray"( - max_varchar0         @b -anyarray"* - max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money" +cnt0         @bbigint: query.sql╫ +ьINSERT INTO postgres_numeric_types +( + c_boolean, + c_bit, + c_smallint, + c_integer, + c_bigint, + c_decimal, + c_numeric, + c_real, + c_double_precision, + c_money +) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypesBatch :copyfrom*ZV + c_boolean0         R 
publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money: query.sqlBpostgres_numeric_typesй П INSERT INTO postgres_string_types ( diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index cf635b5a..f5522631 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -11,7 +11,8 @@ namespace NpgsqlExampleGen; public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); -public readonly record struct PostgresType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); +public readonly record struct PostgresType(Guid? CUuid, CEnum? CEnum); +public readonly record struct PostgresNumericType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney); public readonly record struct PostgresStringType(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? 
CText); public readonly record struct PostgresDatetimeType(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); public readonly record struct PostgresNetworkType(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index c64b9af8..09def850 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -743,8 +743,8 @@ public async Task TruncateExtendedBios() } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; - public readonly record struct InsertPostgresTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; + public readonly record struct InsertPostgresTypesArgs(Guid? CUuid, CEnum? CEnum); public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { if (this.Transaction == null) @@ -753,16 +753,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); @@ -778,24 +768,14 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { command.CommandText = InsertPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresTypesBatchArgs(bool? CBoolean, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid); + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresTypesBatchArgs(Guid? CUuid); public async Task InsertPostgresTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -806,15 +786,6 @@ public async Task InsertPostgresTypesBatch(List ar foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); } @@ -825,8 +796,8 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(bool? 
CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, CEnum? CEnum); + private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; + public readonly record struct GetPostgresTypesRow(Guid? CUuid, CEnum? CEnum); public async Task GetPostgresTypes() { if (this.Transaction == null) @@ -841,18 +812,8 @@ public async Task InsertPostgresTypesBatch(List ar { return new GetPostgresTypesRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CEnum = reader.IsDBNull(1) ? null : reader.GetString(1).ToCEnum() }; } } @@ -874,18 +835,8 @@ public async Task InsertPostgresTypesBatch(List ar { return new GetPostgresTypesRow { - CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? 
null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? null : reader.GetString(11).ToCEnum() + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + CEnum = reader.IsDBNull(1) ? null : reader.GetString(1).ToCEnum() }; } } @@ -894,8 +845,8 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; - public readonly record struct GetPostgresTypesCntRow(short? CSmallint, bool? CBoolean, int? CInteger, long? CBigint, float? CReal, decimal? CNumeric, decimal? CDecimal, double? CDoublePrecision, decimal? CMoney, Guid? CUuid, long Cnt); + private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; + public readonly record struct GetPostgresTypesCntRow(Guid? CUuid, long Cnt); public async Task GetPostgresTypesCnt() { if (this.Transaction == null) @@ -910,17 +861,8 @@ public async Task InsertPostgresTypesBatch(List ar { return new GetPostgresTypesCntRow { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? 
null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) }; } } @@ -942,17 +884,8 @@ public async Task InsertPostgresTypesBatch(List ar { return new GetPostgresTypesCntRow { - CSmallint = reader.IsDBNull(0) ? null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) }; } } @@ -961,7 +894,7 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() { @@ -1038,6 +971,242 @@ public async Task TruncatePostgresTypes() } } + private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; + public readonly record struct InsertPostgresNumericTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney); + public async Task InsertPostgresNumericTypes(InsertPostgresNumericTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(InsertPostgresNumericTypesSql)) + { + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? 
(object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresNumericTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNumericTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1"; + public readonly record struct GetPostgresNumericTypesRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? 
CMoney); + public async Task GetPostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresNumericTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesRow + { + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNumericTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesRow + { + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? 
null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresNumericTypesSql = "TRUNCATE TABLE postgres_numeric_types"; + public async Task TruncatePostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(TruncatePostgresNumericTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresNumericTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNumericTypesCntSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money, COUNT(*) AS cnt FROM postgres_numeric_types GROUP BY c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money LIMIT 1"; + public readonly record struct GetPostgresNumericTypesCntRow(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? 
CMoney, long Cnt); + public async Task GetPostgresNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresNumericTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesCntRow + { + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + Cnt = reader.GetInt64(10) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNumericTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesCntRow + { + CBoolean = reader.IsDBNull(0) ? null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? 
null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? null : reader.GetDecimal(9), + Cnt = reader.GetInt64(10) + }; + } + } + } + + return null; + } + + private const string InsertPostgresNumericTypesBatchSql = "COPY postgres_numeric_types (c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresNumericTypesBatchArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney); + public async Task InsertPostgresNumericTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNumericTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBit ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? 
(object)DBNull.Value, NpgsqlDbType.Money); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; public readonly record struct InsertPostgresStringTypesArgs(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); public async Task InsertPostgresStringTypes(InsertPostgresStringTypesArgs args) diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index 58dec347..5236f9a0 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -121,11 +121,38 @@ }, "columns": [ { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } + } + ] + }, + { + "rel": { + "name": "postgres_numeric_types" + }, + "columns": [ + { + "name": "c_boolean", + "length": -1, + "table": { + "name": "postgres_numeric_types" + }, "type": { "schema": "pg_catalog", "name": "bool" @@ -135,7 +162,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -146,7 +173,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -157,7 +184,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -168,7 +195,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -179,7 +206,7 @@ "name": 
"c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -190,7 +217,7 @@ "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -201,7 +228,7 @@ "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -212,7 +239,7 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -223,31 +250,11 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" } - }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } } ] }, @@ -33459,14 +33466,14 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "isNamedParam": true, "table": { @@ -33474,194 +33481,159 @@ "name": "postgres_types" }, "type": { - "name": "pg_catalog.bool" + "name": "uuid" }, - "originalName": "c_boolean" + "originalName": "c_uuid" } }, { "number": 2, "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 4, - "column": { - "name": "c_integer", + "name": "c_enum", "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" + "name": "c_enum" + } } - }, + } + ], + "comments": [ + " Special types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_bigint", + "name": "c_uuid", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "c_bigint" + "originalName": "c_uuid" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", + "cmd": ":one", + "columns": [ { - "number": 6, - "column": { - "name": "c_real", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 7, - "column": { - "name": "c_numeric", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": 
"postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 11, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - 
"schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypes", + "cmd": ":exec", "parameters": [ { "number": 1, @@ -33670,7 +33642,7 @@ "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.bool" @@ -33681,154 +33653,157 @@ { "number": 2, "column": { - "name": "c_smallint", + "name": "c_bit", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int2" + "name": "pg_catalog.bit" }, - "originalName": "c_smallint" + "originalName": "c_bit" } }, { "number": 3, "column": { - "name": "c_integer", + "name": "c_smallint", "length": -1, "table": { 
"schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int4" + "name": "pg_catalog.int2" }, - "originalName": "c_integer" + "originalName": "c_smallint" } }, { "number": 4, "column": { - "name": "c_bigint", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "pg_catalog.int4" }, - "originalName": "c_bigint" + "originalName": "c_integer" } }, { "number": 5, "column": { - "name": "c_real", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float4" + "name": "pg_catalog.int8" }, - "originalName": "c_real" + "originalName": "c_bigint" } }, { "number": 6, "column": { - "name": "c_numeric", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_numeric" + "originalName": "c_decimal" } }, { "number": 7, "column": { - "name": "c_decimal", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_decimal" + "originalName": "c_numeric" } }, { "number": 8, "column": { - "name": "c_double_precision", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float8" + "name": "pg_catalog.float4" }, - "originalName": "c_double_precision" + "originalName": "c_real" } }, { "number": 9, "column": { - "name": "c_money", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "money" + "name": 
"pg_catalog.float8" }, - "originalName": "c_money" + "originalName": "c_double_precision" } }, { "number": 10, "column": { - "name": "c_uuid", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "name": "money" }, - "originalName": "c_uuid" + "originalName": "c_money" } } ], + "comments": [ + " Numeric types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "postgres_numeric_types" } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", + "text": "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1", + "name": "GetPostgresNumericTypes", "cmd": ":one", "columns": [ { "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33840,7 +33815,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33852,7 +33827,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33864,7 +33839,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33876,7 +33851,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33885,22 +33860,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": 
"postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33909,22 +33884,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33936,48 +33911,56 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_numeric_types", + "name": "TruncatePostgresNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money,\n COUNT(*) AS cnt\nFROM postgres_numeric_types\nGROUP BY\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\nLIMIT 1", + "name": "GetPostgresNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_uuid", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "bool" }, - "originalName": "c_uuid" + "originalName": "c_boolean" }, { - "name": "c_enum", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, 
"type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "bit" }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_bit" + }, { "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33985,23 +33968,11 @@ }, "originalName": "c_smallint" }, - { - "name": "c_boolean", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, { "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34013,7 +33984,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34022,22 +33993,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34046,22 +34017,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": 
"numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34073,24 +34044,13 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, { "name": "cnt", "notNull": true, @@ -34104,45 +34064,165 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "INSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - 
"length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_decimal", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - 
"name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_numeric_types" + } }, { "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 6c5abf85..3a216096 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunner■ы public"└publicГ +./dist/LocalRunnerъь public"мpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,30 +13,31 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext▄ -postgres_types< - c_boolean0         Rpostgres_typesb -pg_catalogbool7 -c_bit0         Rpostgres_typesb -pg_catalogbit= - -c_smallint0         Rpostgres_typesb -pg_catalogint2< - c_integer0         
Rpostgres_typesb -pg_catalogint4; -c_bigint0         Rpostgres_typesb -pg_catalogint8? - c_decimal0         Rpostgres_typesb -pg_catalognumeric? - c_numeric0         Rpostgres_typesb -pg_catalognumeric; -c_real0         Rpostgres_typesb -pg_catalogfloat4G -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8/ -c_money0         Rpostgres_typesbmoney- + description0         Rbooksbtextr +postgres_types- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumч +c_enum0         Rpostgres_typesbc_enum╘ +postgres_numeric_typesD + c_boolean0         Rpostgres_numeric_typesb +pg_catalogbool? +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitE + +c_smallint0         Rpostgres_numeric_typesb +pg_catalogint2D + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4C +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8G + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericG + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericC +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4O +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat87 +c_money0         Rpostgres_numeric_typesbmoneyч postgres_string_typesB c_char0         Rpostgres_string_typesb pg_catalogbpcharF @@ -10324,189 +10325,181 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql╧ - -┴ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ +X INSERT INTO postgres_types ( - c_boolean, - c_bit, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid, c_enum ) VALUES ( $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12::c_enum -)InsertPostgresTypes:exec*TP - c_boolean0         8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG 
-c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR - -c_smallint0         8Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*TP - c_integer0         8Rpublicpostgres_typesbpg_catalog.int4z c_integer*RN -c_bigint0         8Rpublicpostgres_typesbpg_catalog.int8zc_bigint*PL -c_real0         8Rpublicpostgres_typesbpg_catalog.float4zc_real*WS - c_numeric0         8Rpublicpostgres_typesbpg_catalog.numericz c_numeric*WS - c_decimal0         8Rpublicpostgres_typesbpg_catalog.numericz c_decimal*h d -c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F -B -c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  -c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ -ШINSERT INTO postgres_types + $2::c_enum +)InsertPostgresTypes:exec*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ +;INSERT INTO postgres_types ( - c_boolean, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid ) VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 -)InsertPostgresTypesBatch :copyfrom*RN - c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP - -c_smallint0         Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*RN - c_integer0         Rpublicpostgres_typesbpg_catalog.int4z c_integer*PL -c_bigint0         Rpublicpostgres_typesbpg_catalog.int8zc_bigint*NJ -c_real0         Rpublicpostgres_typesbpg_catalog.float4zc_real*UQ - c_numeric0         Rpublicpostgres_typesbpg_catalog.numericz c_numeric*UQ - c_decimal0         Rpublicpostgres_typesbpg_catalog.numericz c_decimal*fb -c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ -c_money0         Rpublicpostgres_typesbmoneyzc_money*A -= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ -╒SELECT + $1 
+)InsertPostgresTypesBatch :copyfrom*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ +:SELECT + c_uuid, + c_enum +FROM postgres_types +LIMIT 1GetPostgresTypes:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ +VSELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_types +GROUP BY + c_uuid +LIMIT 1GetPostgresTypesCnt:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql№ +╥SELECT + MAX(c_integer) AS max_integer, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM postgres_datetime_types +CROSS JOIN postgres_numeric_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( + max_integer0         @b +anyarray"( + max_varchar0         @b +anyarray"* + max_timestamp0         @b +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +э +INSERT INTO postgres_numeric_types +( c_boolean, c_bit, c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"> -c_bit0         Rpostgres_typesb -pg_catalogbitzc_bit"I - -c_smallint0         Rpostgres_typesb + c_money +) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypes:exec*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz 
c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money2 Numeric types : query.sqlBpostgres_numeric_typesь +ЧSELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1GetPostgresNumericTypes:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД -ьSELECT - c_smallint, +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money: query.sqlW +%TRUNCATE TABLE postgres_numeric_typesTruncatePostgresNumericTypes:exec: query.sqlъ +ЄSELECT c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - 
c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, c_money, - c_uuid, COUNT(*) AS cnt -FROM postgres_types +FROM postgres_numeric_types GROUP BY - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid -LIMIT 1GetPostgresTypesCnt:one"I - -c_smallint0         Rpostgres_typesb + c_money +LIMIT 1GetPostgresNumericTypesCnt:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sqlЇ -╩SELECT - MAX(c_integer) AS max_integer, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM postgres_types -CROSS JOIN postgres_string_types -CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( - max_integer0         @b -anyarray"( - max_varchar0         @b -anyarray"* - max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb 
+pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money" +cnt0         @bbigint: query.sql╫ +ьINSERT INTO postgres_numeric_types +( + c_boolean, + c_bit, + c_smallint, + c_integer, + c_bigint, + c_decimal, + c_numeric, + c_real, + c_double_precision, + c_money +) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypesBatch :copyfrom*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money: query.sqlBpostgres_numeric_typesй П INSERT INTO postgres_string_types ( diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 6ebc42d6..01b6408b 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -24,6 +24,11 @@ public class Book public string Description { get; set; } }; public class PostgresType + { + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } + }; + public class PostgresNumericType { public bool? CBoolean { get; set; } public byte[] CBit { get; set; } @@ -35,8 +40,6 @@ public class PostgresType public float? 
CReal { get; set; } public double? CDoublePrecision { get; set; } public decimal? CMoney { get; set; } - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } }; public class PostgresStringType { diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 2a1ca7a5..ed987e4e 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -854,19 +854,9 @@ public async Task TruncateExtendedBios() } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum ) VALUES ( @c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_real, @c_numeric, @c_decimal, @c_double_precision, @c_money, @c_uuid, @c_enum::c_enum )"; + private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; public class InsertPostgresTypesArgs { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } }; @@ -878,16 +868,6 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { using (var command = connection.CreateCommand(InsertPostgresTypesSql)) { - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? 
(object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); @@ -903,34 +883,15 @@ public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) { command.CommandText = InsertPostgresTypesSql; command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : (object)DBNull.Value); await command.ExecuteNonQueryAsync(); } } - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_boolean, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresTypesBatchArgs { - public bool? CBoolean { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } }; public async Task InsertPostgresTypesBatch(List args) @@ -943,15 +904,6 @@ public async Task InsertPostgresTypesBatch(List ar foreach (var row in args) { await writer.StartRowAsync(); - await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); - await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); - await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); - await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CNumeric ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); - await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); - await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); await writer.WriteAsync(row.CUuid ?? 
(object)DBNull.Value); } @@ -962,19 +914,9 @@ public async Task InsertPostgresTypesBatch(List ar } } - private const string GetPostgresTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, c_enum FROM postgres_types LIMIT 1"; + private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; public class GetPostgresTypesRow { - public bool? CBoolean { get; set; } - public byte[] CBit { get; set; } - public short? CSmallint { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public CEnum? CEnum { get; set; } }; @@ -992,18 +934,8 @@ public async Task GetPostgresTypes() { return new GetPostgresTypesRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() + CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), + CEnum = reader.IsDBNull(1) ? (CEnum? 
)null : reader.GetString(1).ToCEnum() }; } } @@ -1025,18 +957,8 @@ public async Task GetPostgresTypes() { return new GetPostgresTypesRow { - CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), - CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), - CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), - CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), - CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), - CReal = reader.IsDBNull(5) ? (float? )null : reader.GetFloat(5), - CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDecimal = reader.IsDBNull(7) ? (decimal? )null : reader.GetDecimal(7), - CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), - CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), - CUuid = reader.IsDBNull(10) ? (Guid? )null : reader.GetFieldValue(10), - CEnum = reader.IsDBNull(11) ? (CEnum? )null : reader.GetString(11).ToCEnum() + CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), + CEnum = reader.IsDBNull(1) ? (CEnum? )null : reader.GetString(1).ToCEnum() }; } } @@ -1045,18 +967,9 @@ public async Task GetPostgresTypes() return null; } - private const string GetPostgresTypesCntSql = "SELECT c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_smallint, c_boolean, c_integer, c_bigint, c_real, c_numeric, c_decimal, c_double_precision, c_money, c_uuid LIMIT 1"; + private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; public class GetPostgresTypesCntRow { - public short? CSmallint { get; set; } - public bool? CBoolean { get; set; } - public int? CInteger { get; set; } - public long? CBigint { get; set; } - public float? CReal { get; set; } - public decimal? CNumeric { get; set; } - public decimal? 
CDecimal { get; set; } - public double? CDoublePrecision { get; set; } - public decimal? CMoney { get; set; } public Guid? CUuid { get; set; } public long Cnt { get; set; } }; @@ -1074,17 +987,8 @@ public async Task GetPostgresTypesCnt() { return new GetPostgresTypesCntRow { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? (Guid? )null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) }; } } @@ -1106,17 +1010,8 @@ public async Task GetPostgresTypesCnt() { return new GetPostgresTypesCntRow { - CSmallint = reader.IsDBNull(0) ? (short? )null : reader.GetInt16(0), - CBoolean = reader.IsDBNull(1) ? (bool? )null : reader.GetBoolean(1), - CInteger = reader.IsDBNull(2) ? (int? )null : reader.GetInt32(2), - CBigint = reader.IsDBNull(3) ? (long? )null : reader.GetInt64(3), - CReal = reader.IsDBNull(4) ? (float? )null : reader.GetFloat(4), - CNumeric = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), - CDecimal = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), - CDoublePrecision = reader.IsDBNull(7) ? (double? )null : reader.GetDouble(7), - CMoney = reader.IsDBNull(8) ? (decimal? )null : reader.GetDecimal(8), - CUuid = reader.IsDBNull(9) ? (Guid? 
)null : reader.GetFieldValue(9), - Cnt = reader.GetInt64(10) + CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) }; } } @@ -1125,7 +1020,7 @@ public async Task GetPostgresTypesCnt() return null; } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_types CROSS JOIN postgres_string_types CROSS JOIN postgres_datetime_types"; + private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { public int? MaxInteger { get; set; } @@ -1207,6 +1102,291 @@ public async Task TruncatePostgresTypes() } } + private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; + public class InsertPostgresNumericTypesArgs + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? 
CMoney { get; set; } + }; + public async Task InsertPostgresNumericTypes(InsertPostgresNumericTypesArgs args) + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(InsertPostgresNumericTypesSql)) + { + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = InsertPostgresNumericTypesSql; + command.Transaction = this.Transaction; + command.Parameters.AddWithValue("@c_boolean", args.CBoolean ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_bit", args.CBit ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_smallint", args.CSmallint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_integer", args.CInteger ?? 
(object)DBNull.Value); + command.Parameters.AddWithValue("@c_bigint", args.CBigint ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_decimal", args.CDecimal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_numeric", args.CNumeric ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_real", args.CReal ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_double_precision", args.CDoublePrecision ?? (object)DBNull.Value); + command.Parameters.AddWithValue("@c_money", args.CMoney ?? (object)DBNull.Value); + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNumericTypesSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1"; + public class GetPostgresNumericTypesRow + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + }; + public async Task GetPostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresNumericTypesSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesRow + { + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? 
)null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? (float? )null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNumericTypesSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesRow + { + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? (float? )null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? 
)null : reader.GetDecimal(9) + }; + } + } + } + + return null; + } + + private const string TruncatePostgresNumericTypesSql = "TRUNCATE TABLE postgres_numeric_types"; + public async Task TruncatePostgresNumericTypes() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(TruncatePostgresNumericTypesSql)) + { + await command.ExecuteNonQueryAsync(); + } + } + + return; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = TruncatePostgresNumericTypesSql; + command.Transaction = this.Transaction; + await command.ExecuteNonQueryAsync(); + } + } + + private const string GetPostgresNumericTypesCntSql = "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money, COUNT(*) AS cnt FROM postgres_numeric_types GROUP BY c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money LIMIT 1"; + public class GetPostgresNumericTypesCntRow + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? 
CMoney { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresNumericTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresNumericTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesCntRow + { + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? )null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? (float? )null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + Cnt = reader.GetInt64(10) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresNumericTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresNumericTypesCntRow + { + CBoolean = reader.IsDBNull(0) ? (bool? )null : reader.GetBoolean(0), + CBit = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CSmallint = reader.IsDBNull(2) ? (short? )null : reader.GetInt16(2), + CInteger = reader.IsDBNull(3) ? (int? 
)null : reader.GetInt32(3), + CBigint = reader.IsDBNull(4) ? (long? )null : reader.GetInt64(4), + CDecimal = reader.IsDBNull(5) ? (decimal? )null : reader.GetDecimal(5), + CNumeric = reader.IsDBNull(6) ? (decimal? )null : reader.GetDecimal(6), + CReal = reader.IsDBNull(7) ? (float? )null : reader.GetFloat(7), + CDoublePrecision = reader.IsDBNull(8) ? (double? )null : reader.GetDouble(8), + CMoney = reader.IsDBNull(9) ? (decimal? )null : reader.GetDecimal(9), + Cnt = reader.GetInt64(10) + }; + } + } + } + + return null; + } + + private const string InsertPostgresNumericTypesBatchSql = "COPY postgres_numeric_types (c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresNumericTypesBatchArgs + { + public bool? CBoolean { get; set; } + public byte[] CBit { get; set; } + public short? CSmallint { get; set; } + public int? CInteger { get; set; } + public long? CBigint { get; set; } + public decimal? CDecimal { get; set; } + public decimal? CNumeric { get; set; } + public float? CReal { get; set; } + public double? CDoublePrecision { get; set; } + public decimal? CMoney { get; set; } + }; + public async Task InsertPostgresNumericTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresNumericTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CBoolean ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBit ?? (object)DBNull.Value); + await writer.WriteAsync(row.CSmallint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CInteger ?? (object)DBNull.Value); + await writer.WriteAsync(row.CBigint ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDecimal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CNumeric ?? 
(object)DBNull.Value); + await writer.WriteAsync(row.CReal ?? (object)DBNull.Value); + await writer.WriteAsync(row.CDoublePrecision ?? (object)DBNull.Value); + await writer.WriteAsync(row.CMoney ?? (object)DBNull.Value, NpgsqlDbType.Money); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + private const string InsertPostgresStringTypesSql = " INSERT INTO postgres_string_types ( c_char, c_varchar, c_character_varying, c_bpchar, c_text ) VALUES (@c_char, @c_varchar, @c_character_varying, @c_bpchar, @c_text)"; public class InsertPostgresStringTypesArgs { diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index e836ca89..e44c4cd8 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -121,11 +121,38 @@ }, "columns": [ { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "table": { "name": "postgres_types" }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + } + } + ] + }, + { + "rel": { + "name": "postgres_numeric_types" + }, + "columns": [ + { + "name": "c_boolean", + "length": -1, + "table": { + "name": "postgres_numeric_types" + }, "type": { "schema": "pg_catalog", "name": "bool" @@ -135,7 +162,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -146,7 +173,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -157,7 +184,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -168,7 +195,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", 
@@ -179,7 +206,7 @@ "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -190,7 +217,7 @@ "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -201,7 +228,7 @@ "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -212,7 +239,7 @@ "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -223,31 +250,11 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" } - }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } } ] }, @@ -33459,14 +33466,14 @@ "filename": "query.sql" }, { - "text": "\nINSERT INTO postgres_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12::c_enum\n)", + "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", "name": "InsertPostgresTypes", "cmd": ":exec", "parameters": [ { "number": 1, "column": { - "name": "c_boolean", + "name": "c_uuid", "length": -1, "isNamedParam": true, "table": { @@ -33474,194 +33481,159 @@ "name": "postgres_types" }, "type": { - "name": "pg_catalog.bool" + "name": "uuid" }, - "originalName": "c_boolean" + "originalName": "c_uuid" } }, { "number": 2, "column": { - "name": "c_bit", - "length": -1, - "isNamedParam": true, - "table": { - "schema": 
"public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.bit" - }, - "originalName": "c_bit" - } - }, - { - "number": 3, - "column": { - "name": "c_smallint", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.int2" - }, - "originalName": "c_smallint" - } - }, - { - "number": 4, - "column": { - "name": "c_integer", + "name": "c_enum", "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, "type": { - "name": "pg_catalog.int4" - }, - "originalName": "c_integer" + "name": "c_enum" + } } - }, + } + ], + "comments": [ + " Special types " + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "number": 5, + "number": 1, "column": { - "name": "c_bigint", + "name": "c_uuid", "length": -1, - "isNamedParam": true, "table": { "schema": "public", "name": "postgres_types" }, "type": { - "name": "pg_catalog.int8" + "name": "uuid" }, - "originalName": "c_bigint" + "originalName": "c_uuid" } - }, + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_types" + } + }, + { + "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", + "name": "GetPostgresTypes", + "cmd": ":one", + "columns": [ { - "number": 6, - "column": { - "name": "c_real", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float4" - }, - "originalName": "c_real" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 7, - "column": { - "name": "c_numeric", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - 
"name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_numeric" - } - }, + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresTypesCnt", + "cmd": ":one", + "columns": [ { - "number": 8, - "column": { - "name": "c_decimal", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.numeric" - }, - "originalName": "c_decimal" - } + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" }, { - "number": 9, - "column": { - "name": "c_double_precision", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "pg_catalog.float8" - }, - "originalName": "c_double_precision" + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" } - }, + } + ], + "filename": "query.sql" + }, + { + "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", + "name": "GetPostgresFunctions", + "cmd": ":one", + "columns": [ { - "number": 10, - "column": { - "name": "c_money", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "money" - }, - "originalName": "c_money" + "name": "max_integer", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 11, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": 
{ - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" + "name": "max_varchar", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } }, { - "number": 12, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } + "name": "max_timestamp", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "anyarray" } } ], - "comments": [ - " Basic types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } + "filename": "query.sql" }, { - "text": "INSERT INTO postgres_types\n(\n c_boolean,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\n)\nVALUES (\n $1, \n $2, \n $3, \n $4, \n $5, \n $6, \n $7, \n $8, \n $9, \n $10\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", + "text": "TRUNCATE TABLE postgres_types", + "name": "TruncatePostgresTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypes", + "cmd": ":exec", "parameters": [ { "number": 1, @@ -33670,7 +33642,7 @@ "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.bool" @@ -33681,154 +33653,157 @@ { "number": 2, "column": { - "name": "c_smallint", + "name": "c_bit", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int2" + "name": "pg_catalog.bit" }, - "originalName": "c_smallint" + "originalName": "c_bit" } }, { "number": 3, "column": { - "name": "c_integer", + "name": "c_smallint", "length": -1, "table": { 
"schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int4" + "name": "pg_catalog.int2" }, - "originalName": "c_integer" + "originalName": "c_smallint" } }, { "number": 4, "column": { - "name": "c_bigint", + "name": "c_integer", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.int8" + "name": "pg_catalog.int4" }, - "originalName": "c_bigint" + "originalName": "c_integer" } }, { "number": 5, "column": { - "name": "c_real", + "name": "c_bigint", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float4" + "name": "pg_catalog.int8" }, - "originalName": "c_real" + "originalName": "c_bigint" } }, { "number": 6, "column": { - "name": "c_numeric", + "name": "c_decimal", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_numeric" + "originalName": "c_decimal" } }, { "number": 7, "column": { - "name": "c_decimal", + "name": "c_numeric", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "pg_catalog.numeric" }, - "originalName": "c_decimal" + "originalName": "c_numeric" } }, { "number": 8, "column": { - "name": "c_double_precision", + "name": "c_real", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "pg_catalog.float8" + "name": "pg_catalog.float4" }, - "originalName": "c_double_precision" + "originalName": "c_real" } }, { "number": 9, "column": { - "name": "c_money", + "name": "c_double_precision", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "money" + "name": 
"pg_catalog.float8" }, - "originalName": "c_money" + "originalName": "c_double_precision" } }, { "number": 10, "column": { - "name": "c_uuid", + "name": "c_money", "length": -1, "table": { "schema": "public", - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "name": "money" }, - "originalName": "c_uuid" + "originalName": "c_money" } } ], + "comments": [ + " Numeric types " + ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_types" + "name": "postgres_numeric_types" } }, { - "text": "SELECT \n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", + "text": "SELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1", + "name": "GetPostgresNumericTypes", "cmd": ":one", "columns": [ { "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33840,7 +33815,7 @@ "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33852,7 +33827,7 @@ "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33864,7 +33839,7 @@ "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33876,7 +33851,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33885,22 +33860,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": 
"postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33909,22 +33884,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33936,48 +33911,56 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" - }, + } + ], + "filename": "query.sql" + }, + { + "text": "TRUNCATE TABLE postgres_numeric_types", + "name": "TruncatePostgresNumericTypes", + "cmd": ":exec", + "filename": "query.sql" + }, + { + "text": "SELECT\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money,\n COUNT(*) AS cnt\nFROM postgres_numeric_types\nGROUP BY\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\nLIMIT 1", + "name": "GetPostgresNumericTypesCnt", + "cmd": ":one", + "columns": [ { - "name": "c_uuid", + "name": "c_boolean", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { - "name": "uuid" + "schema": "pg_catalog", + "name": "bool" }, - "originalName": "c_uuid" + "originalName": "c_boolean" }, { - "name": "c_enum", + "name": "c_bit", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, 
"type": { - "name": "c_enum" + "schema": "pg_catalog", + "name": "bit" }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_smallint,\n c_boolean,\n c_integer,\n c_bigint,\n c_real,\n c_numeric,\n c_decimal,\n c_double_precision,\n c_money,\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ + "originalName": "c_bit" + }, { "name": "c_smallint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -33985,23 +33968,11 @@ }, "originalName": "c_smallint" }, - { - "name": "c_boolean", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "schema": "pg_catalog", - "name": "bool" - }, - "originalName": "c_boolean" - }, { "name": "c_integer", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34013,7 +33984,7 @@ "name": "c_bigint", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34022,22 +33993,22 @@ "originalName": "c_bigint" }, { - "name": "c_real", + "name": "c_decimal", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": "float4" + "name": "numeric" }, - "originalName": "c_real" + "originalName": "c_decimal" }, { "name": "c_numeric", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34046,22 +34017,22 @@ "originalName": "c_numeric" }, { - "name": "c_decimal", + "name": "c_real", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", - "name": 
"numeric" + "name": "float4" }, - "originalName": "c_decimal" + "originalName": "c_real" }, { "name": "c_double_precision", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "schema": "pg_catalog", @@ -34073,24 +34044,13 @@ "name": "c_money", "length": -1, "table": { - "name": "postgres_types" + "name": "postgres_numeric_types" }, "type": { "name": "money" }, "originalName": "c_money" }, - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, { "name": "cnt", "notNull": true, @@ -34104,45 +34064,165 @@ "filename": "query.sql" }, { - "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_types\nCROSS JOIN postgres_string_types\nCROSS JOIN postgres_datetime_types", - "name": "GetPostgresFunctions", - "cmd": ":one", - "columns": [ + "text": "INSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", + "name": "InsertPostgresNumericTypesBatch", + "cmd": ":copyfrom", + "parameters": [ { - "name": "max_integer", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 1, + "column": { + "name": "c_boolean", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean" } }, { - "name": "max_varchar", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 2, + "column": { + "name": "c_bit", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.bit" + }, + "originalName": "c_bit" } }, { - "name": "max_timestamp", - "notNull": true, - 
"length": -1, - "isFuncCall": true, - "type": { - "name": "anyarray" + "number": 3, + "column": { + "name": "c_smallint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int2" + }, + "originalName": "c_smallint" + } + }, + { + "number": 4, + "column": { + "name": "c_integer", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer" + } + }, + { + "number": 5, + "column": { + "name": "c_bigint", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.int8" + }, + "originalName": "c_bigint" + } + }, + { + "number": 6, + "column": { + "name": "c_decimal", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal" + } + }, + { + "number": 7, + "column": { + "name": "c_numeric", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_numeric" + } + }, + { + "number": 8, + "column": { + "name": "c_real", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float4" + }, + "originalName": "c_real" + } + }, + { + "number": 9, + "column": { + "name": "c_double_precision", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "pg_catalog.float8" + }, + "originalName": "c_double_precision" + } + }, + { + "number": 10, + "column": { + "name": "c_money", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_numeric_types" + }, + "type": { + "name": "money" + }, + "originalName": "c_money" } } ], - "filename": "query.sql" - }, - { - "text": "TRUNCATE TABLE postgres_types", - 
"name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_numeric_types" + } }, { "text": "\nINSERT INTO postgres_string_types\n(\n c_char,\n c_varchar,\n c_character_varying,\n c_bpchar,\n c_text\n)\nVALUES ($1, $2, $3, $4, $5)", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index 80368996..f9b68e43 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunner■ы public"└publicГ +./dist/LocalRunnerъь public"мpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,30 +13,31 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtext▄ -postgres_types< - c_boolean0         Rpostgres_typesb -pg_catalogbool7 -c_bit0         Rpostgres_typesb -pg_catalogbit= - -c_smallint0         
Rpostgres_typesb -pg_catalogint2< - c_integer0         Rpostgres_typesb -pg_catalogint4; -c_bigint0         Rpostgres_typesb -pg_catalogint8? - c_decimal0         Rpostgres_typesb -pg_catalognumeric? - c_numeric0         Rpostgres_typesb -pg_catalognumeric; -c_real0         Rpostgres_typesb -pg_catalogfloat4G -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8/ -c_money0         Rpostgres_typesbmoney- + description0         Rbooksbtextr +postgres_types- c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enumч +c_enum0         Rpostgres_typesbc_enum╘ +postgres_numeric_typesD + c_boolean0         Rpostgres_numeric_typesb +pg_catalogbool? +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitE + +c_smallint0         Rpostgres_numeric_typesb +pg_catalogint2D + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4C +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8G + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericG + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericC +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4O +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat87 +c_money0         Rpostgres_numeric_typesbmoneyч postgres_string_typesB c_char0         Rpostgres_string_typesb pg_catalogbpcharF @@ -10324,189 +10325,181 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql╧ - -┴ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ +X INSERT INTO postgres_types ( - c_boolean, - c_bit, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid, c_enum ) VALUES ( $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12::c_enum -)InsertPostgresTypes:exec*TP - c_boolean0         
8Rpublicpostgres_typesbpg_catalog.boolz c_boolean*KG -c_bit0         8Rpublicpostgres_typesbpg_catalog.bitzc_bit*VR - -c_smallint0         8Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*TP - c_integer0         8Rpublicpostgres_typesbpg_catalog.int4z c_integer*RN -c_bigint0         8Rpublicpostgres_typesbpg_catalog.int8zc_bigint*PL -c_real0         8Rpublicpostgres_typesbpg_catalog.float4zc_real*WS - c_numeric0         8Rpublicpostgres_typesbpg_catalog.numericz c_numeric*WS - c_decimal0         8Rpublicpostgres_typesbpg_catalog.numericz c_decimal*h d -c_double_precision0         8Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*F -B -c_money0         8Rpublicpostgres_typesbmoneyzc_money*C ? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*!  -c_enum0         bc_enum2 Basic types : query.sqlBpostgres_typesЬ -ШINSERT INTO postgres_types + $2::c_enum +)InsertPostgresTypes:exec*C? +c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ +;INSERT INTO postgres_types ( - c_boolean, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid ) VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 -)InsertPostgresTypesBatch :copyfrom*RN - c_boolean0         Rpublicpostgres_typesbpg_catalog.boolz c_boolean*TP - -c_smallint0         Rpublicpostgres_typesbpg_catalog.int2z -c_smallint*RN - c_integer0         Rpublicpostgres_typesbpg_catalog.int4z c_integer*PL -c_bigint0         Rpublicpostgres_typesbpg_catalog.int8zc_bigint*NJ -c_real0         Rpublicpostgres_typesbpg_catalog.float4zc_real*UQ - c_numeric0         Rpublicpostgres_typesbpg_catalog.numericz c_numeric*UQ - c_decimal0         Rpublicpostgres_typesbpg_catalog.numericz c_decimal*fb -c_double_precision0         Rpublicpostgres_typesbpg_catalog.float8zc_double_precision*D @ -c_money0         Rpublicpostgres_typesbmoneyzc_money*A -= -c_uuid0         
Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types├ -╒SELECT + $1 +)InsertPostgresTypesBatch :copyfrom*A= +c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ +:SELECT + c_uuid, + c_enum +FROM postgres_types +LIMIT 1GetPostgresTypes:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid"7 +c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ +VSELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_types +GROUP BY + c_uuid +LIMIT 1GetPostgresTypesCnt:one"5 +c_uuid0         Rpostgres_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql№ +╥SELECT + MAX(c_integer) AS max_integer, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM postgres_datetime_types +CROSS JOIN postgres_numeric_types +CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( + max_integer0         @b +anyarray"( + max_varchar0         @b +anyarray"* + max_timestamp0         @b +anyarray: query.sqlH +TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +э +INSERT INTO postgres_numeric_types +( c_boolean, c_bit, c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"> -c_bit0         Rpostgres_typesb -pg_catalogbitzc_bit"I - -c_smallint0         Rpostgres_typesb + c_money +) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypes:exec*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + 
c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money2 Numeric types : query.sqlBpostgres_numeric_typesь +ЧSELECT c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money FROM postgres_numeric_types LIMIT 1GetPostgresNumericTypes:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sqlД -ьSELECT - c_smallint, +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb +pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money: query.sqlW +%TRUNCATE TABLE postgres_numeric_typesTruncatePostgresNumericTypes:exec: 
query.sqlъ +ЄSELECT c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, c_money, - c_uuid, COUNT(*) AS cnt -FROM postgres_types +FROM postgres_numeric_types GROUP BY - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid -LIMIT 1GetPostgresTypesCnt:one"I - -c_smallint0         Rpostgres_typesb + c_money +LIMIT 1GetPostgresNumericTypesCnt:one"O + c_boolean0         Rpostgres_numeric_typesb +pg_catalogboolz c_boolean"F +c_bit0         Rpostgres_numeric_typesb +pg_catalogbitzc_bit"Q + +c_smallint0         Rpostgres_numeric_typesb pg_catalogint2z -c_smallint"G - c_boolean0         Rpostgres_typesb -pg_catalogboolz c_boolean"G - c_integer0         Rpostgres_typesb -pg_catalogint4z c_integer"E -c_bigint0         Rpostgres_typesb -pg_catalogint8zc_bigint"C -c_real0         Rpostgres_typesb -pg_catalogfloat4zc_real"J - c_numeric0         Rpostgres_typesb -pg_catalognumericz c_numeric"J - c_decimal0         Rpostgres_typesb -pg_catalognumericz c_decimal"[ -c_double_precision0         Rpostgres_typesb -pg_catalogfloat8zc_double_precision"8 -c_money0         Rpostgres_typesbmoneyzc_money"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sqlЇ -╩SELECT - MAX(c_integer) AS max_integer, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM postgres_types -CROSS JOIN postgres_string_types -CROSS JOIN postgres_datetime_typesGetPostgresFunctions:one"( - max_integer0         @b -anyarray"( - max_varchar0         @b -anyarray"* - max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlй +c_smallint"O + c_integer0         Rpostgres_numeric_typesb +pg_catalogint4z c_integer"M +c_bigint0         Rpostgres_numeric_typesb +pg_catalogint8zc_bigint"R + c_decimal0         Rpostgres_numeric_typesb 
+pg_catalognumericz c_decimal"R + c_numeric0         Rpostgres_numeric_typesb +pg_catalognumericz c_numeric"K +c_real0         Rpostgres_numeric_typesb +pg_catalogfloat4zc_real"c +c_double_precision0         Rpostgres_numeric_typesb +pg_catalogfloat8zc_double_precision"@ +c_money0         Rpostgres_numeric_typesbmoneyzc_money" +cnt0         @bbigint: query.sql╫ +ьINSERT INTO postgres_numeric_types +( + c_boolean, + c_bit, + c_smallint, + c_integer, + c_bigint, + c_decimal, + c_numeric, + c_real, + c_double_precision, + c_money +) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)InsertPostgresNumericTypesBatch :copyfrom*ZV + c_boolean0         R publicpostgres_numeric_typesbpg_catalog.boolz c_boolean*QM +c_bit0         R publicpostgres_numeric_typesbpg_catalog.bitzc_bit*\X + +c_smallint0         R publicpostgres_numeric_typesbpg_catalog.int2z +c_smallint*ZV + c_integer0         R publicpostgres_numeric_typesbpg_catalog.int4z c_integer*XT +c_bigint0         R publicpostgres_numeric_typesbpg_catalog.int8zc_bigint*]Y + c_decimal0         R publicpostgres_numeric_typesbpg_catalog.numericz c_decimal*]Y + c_numeric0         R publicpostgres_numeric_typesbpg_catalog.numericz c_numeric*VR +c_real0         R publicpostgres_numeric_typesbpg_catalog.float4zc_real*n j +c_double_precision0         R publicpostgres_numeric_typesbpg_catalog.float8zc_double_precision*L +H +c_money0         R publicpostgres_numeric_typesbmoneyzc_money: query.sqlBpostgres_numeric_typesй П INSERT INTO postgres_string_types ( diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index 4a4fc751..86a95910 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -1,32 +1,12 @@ -/* Basic types */ +/* Special types */ -- name: InsertPostgresTypes :exec INSERT INTO postgres_types ( - c_boolean, - c_bit, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, 
c_uuid, c_enum ) VALUES ( - sqlc.narg('c_boolean'), - sqlc.narg('c_bit'), - sqlc.narg('c_smallint'), - sqlc.narg('c_integer'), - sqlc.narg('c_bigint'), - sqlc.narg('c_real'), - sqlc.narg('c_numeric'), - sqlc.narg('c_decimal'), - sqlc.narg('c_double_precision'), - sqlc.narg('c_money'), sqlc.narg('c_uuid'), sqlc.narg('c_enum')::c_enum ); @@ -34,85 +14,105 @@ VALUES ( -- name: InsertPostgresTypesBatch :copyfrom INSERT INTO postgres_types ( - c_boolean, - c_smallint, - c_integer, - c_bigint, - c_real, - c_numeric, - c_decimal, - c_double_precision, - c_money, c_uuid ) VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 + $1 ); -- name: GetPostgresTypes :one -SELECT +SELECT + c_uuid, + c_enum +FROM postgres_types +LIMIT 1; + +-- name: GetPostgresTypesCnt :one +SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_types +GROUP BY + c_uuid +LIMIT 1; + +-- name: GetPostgresFunctions :one +SELECT + MAX(c_integer) AS max_integer, + MAX(c_varchar) AS max_varchar, + MAX(c_timestamp) AS max_timestamp +FROM postgres_datetime_types +CROSS JOIN postgres_numeric_types +CROSS JOIN postgres_string_types; + +-- name: TruncatePostgresTypes :exec +TRUNCATE TABLE postgres_types; + +/* Numeric types */ + +-- name: InsertPostgresNumericTypes :exec +INSERT INTO postgres_numeric_types +( c_boolean, c_bit, c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid, - c_enum -FROM postgres_types -LIMIT 1; + c_money +) +VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10); --- name: GetPostgresTypesCnt :one +-- name: GetPostgresNumericTypes :one +SELECT * FROM postgres_numeric_types LIMIT 1; + +-- name: TruncatePostgresNumericTypes :exec +TRUNCATE TABLE postgres_numeric_types; + +-- name: GetPostgresNumericTypesCnt :one SELECT - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, c_money, - c_uuid, COUNT(*) AS cnt -FROM 
postgres_types +FROM postgres_numeric_types GROUP BY - c_smallint, c_boolean, + c_bit, + c_smallint, c_integer, c_bigint, - c_real, - c_numeric, c_decimal, + c_numeric, + c_real, c_double_precision, - c_money, - c_uuid + c_money LIMIT 1; --- name: GetPostgresFunctions :one -SELECT - MAX(c_integer) AS max_integer, - MAX(c_varchar) AS max_varchar, - MAX(c_timestamp) AS max_timestamp -FROM postgres_types -CROSS JOIN postgres_string_types -CROSS JOIN postgres_datetime_types; - --- name: TruncatePostgresTypes :exec -TRUNCATE TABLE postgres_types; +-- name: InsertPostgresNumericTypesBatch :copyfrom +INSERT INTO postgres_numeric_types +( + c_boolean, + c_bit, + c_smallint, + c_integer, + c_bigint, + c_decimal, + c_numeric, + c_real, + c_double_precision, + c_money +) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10); /* String types */ diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index fa03b367..9b2d6ed0 100644 --- a/examples/config/postgresql/types/schema.sql +++ b/examples/config/postgresql/types/schema.sql @@ -3,7 +3,12 @@ CREATE EXTENSION "uuid-ossp"; CREATE TYPE c_enum AS ENUM ('small', 'medium', 'big'); CREATE TABLE postgres_types ( - /* Numeric Data Types */ + /* Special Data Types */ + c_uuid UUID, + c_enum c_enum +); + +CREATE TABLE postgres_numeric_types ( c_boolean BOOLEAN, c_bit BIT(10), c_smallint SMALLINT, @@ -13,11 +18,7 @@ CREATE TABLE postgres_types ( c_numeric NUMERIC(10, 7), c_real REAL, c_double_precision DOUBLE PRECISION, - c_money MONEY, - - /* Special Data Types */ - c_uuid UUID, - c_enum c_enum + c_money MONEY ); CREATE TABLE postgres_string_types ( From 5d5310c605e1a704b4909ddc827fb2e7a8db448e Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 23 Aug 2025 00:09:22 +0200 Subject: [PATCH 30/33] fix: separate rest of postgres data types to own table --- .../Templates/PostgresTests.cs | 48 +- end2end/EndToEndTests/NpgsqlDapperTester.cs | 3 +- .../NpgsqlDapperTester.generated.cs | 48 +- 
end2end/EndToEndTests/NpgsqlTester.cs | 3 +- .../EndToEndTests/NpgsqlTester.generated.cs | 48 +- .../EndToEndTestsLegacy/NpgsqlDapperTester.cs | 3 +- .../NpgsqlDapperTester.generated.cs | 48 +- end2end/EndToEndTestsLegacy/NpgsqlTester.cs | 3 +- .../NpgsqlTester.generated.cs | 48 +- examples/NpgsqlDapperExample/Models.cs | 25 +- examples/NpgsqlDapperExample/QuerySql.cs | 188 +++----- examples/NpgsqlDapperExample/request.json | 441 ++++++++---------- examples/NpgsqlDapperExample/request.message | 139 +++--- examples/NpgsqlDapperLegacyExample/Models.cs | 25 +- .../NpgsqlDapperLegacyExample/QuerySql.cs | 188 +++----- .../NpgsqlDapperLegacyExample/request.json | 441 ++++++++---------- .../NpgsqlDapperLegacyExample/request.message | 139 +++--- examples/NpgsqlExample/Models.cs | 3 +- examples/NpgsqlExample/QuerySql.cs | 292 ++++-------- examples/NpgsqlExample/request.json | 441 ++++++++---------- examples/NpgsqlExample/request.message | 139 +++--- examples/NpgsqlLegacyExample/Models.cs | 25 +- examples/NpgsqlLegacyExample/QuerySql.cs | 318 +++++-------- examples/NpgsqlLegacyExample/request.json | 441 ++++++++---------- examples/NpgsqlLegacyExample/request.message | 139 +++--- examples/config/postgresql/types/query.sql | 86 ++-- examples/config/postgresql/types/schema.sql | 72 ++- 27 files changed, 1633 insertions(+), 2161 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index a1d31332..46776aaa 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -476,7 +476,7 @@ public async Task TestPostgresJsonDataTypes( if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, @@ -484,7 
+484,7 @@ await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstru CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + var expected = new QuerySql.GetPostgresSpecialTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, @@ -492,10 +492,10 @@ await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstru CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -516,13 +516,13 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G public void TestPostgresInvalidJson() { Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs + QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs + QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); @@ -916,19 +916,19 @@ private static IEnumerable PostgresGuidDataTypesTestCases [TestCaseSource(nameof(PostgresGuidDataTypesTestCases))] public async Task TestPostgresGuidDataTypes(Guid? 
cUuid) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CUuid = cUuid }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresSpecialTypesRow { CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); } @@ -952,22 +952,22 @@ private static IEnumerable PostgresGuidCopyFromTestCases public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) { var batchArgs = Enumerable.Range(0, batchSize) - .Select(_ => new QuerySql.InsertPostgresTypesBatchArgs + .Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CUuid = cUuid }) .ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); + await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var expected = new QuerySql.GetPostgresSpecialTypesCntRow { Cnt = batchSize, CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresSpecialTypesCnt(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); @@ -990,20 +990,20 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await 
QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + var expected = new QuerySql.GetPostgresSpecialTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -1020,7 +1020,7 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G public void TestPostgresInvalidXml() { Assert.ThrowsAsync(async () => await - QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs + QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); @@ -1095,19 +1095,19 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.Get [TestCase(null)] public async Task TestPostgresStringTypes(CEnum? 
cEnum) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow + var expected = new QuerySql.GetPostgresSpecialTypesRow { CEnum = cEnum }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual{{Consts.UnknownRecordValuePlaceholder}}); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); } diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.cs b/end2end/EndToEndTests/NpgsqlDapperTester.cs index 064d1130..6668dea5 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.cs @@ -14,13 +14,12 @@ public partial class NpgsqlDapperTester public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); - await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); - await QuerySql.TruncatePostgresUnstructuredTypes(); + await QuerySql.TruncatePostgresSpecialTypes(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index 2465ddb2..f0358705 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -400,14 +400,14 @@ void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgr [Test] public void TestPostgresInvalidJson() { - 
Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] @@ -564,14 +564,14 @@ private static IEnumerable PostgresGuidDataTypesTestCases [TestCaseSource(nameof(PostgresGuidDataTypesTestCases))] public async Task TestPostgresGuidDataTypes(Guid? 
cUuid) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CUuid = cUuid }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CUuid = cUuid }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); } @@ -678,17 +678,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if 
(x.CJson.HasValue) @@ -713,14 +713,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -733,14 +733,14 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G [TestCase(null)] public async Task TestPostgresStringTypes(CEnum? 
cEnum) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CEnum = cEnum }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); } @@ -876,16 +876,16 @@ private static IEnumerable PostgresGuidCopyFromTestCases [TestCaseSource(nameof(PostgresGuidCopyFromTestCases))] public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CUuid = cUuid }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CUuid = cUuid }).ToList(); + await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresSpecialTypesCntRow { Cnt = batchSize, CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresSpecialTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); diff --git a/end2end/EndToEndTests/NpgsqlTester.cs 
b/end2end/EndToEndTests/NpgsqlTester.cs index 0ecdc5bd..adddd5ba 100644 --- a/end2end/EndToEndTests/NpgsqlTester.cs +++ b/end2end/EndToEndTests/NpgsqlTester.cs @@ -14,13 +14,12 @@ public partial class NpgsqlTester public async Task EmptyTestsTables() { await QuerySql.TruncateAuthors(); - await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); - await QuerySql.TruncatePostgresUnstructuredTypes(); + await QuerySql.TruncatePostgresSpecialTypes(); } } \ No newline at end of file diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index e6267518..2b35d057 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -400,14 +400,14 @@ void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgr [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { 
CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] @@ -564,14 +564,14 @@ private static IEnumerable PostgresGuidDataTypesTestCases [TestCaseSource(nameof(PostgresGuidDataTypesTestCases))] public async Task TestPostgresGuidDataTypes(Guid? cUuid) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CUuid = cUuid }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CUuid = cUuid }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); } @@ -678,17 +678,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -713,14 +713,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, 
QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -733,14 +733,14 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G [TestCase(null)] public async Task TestPostgresStringTypes(CEnum? cEnum) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CEnum = cEnum }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); } @@ -876,16 +876,16 @@ private static IEnumerable PostgresGuidCopyFromTestCases [TestCaseSource(nameof(PostgresGuidCopyFromTestCases))] public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? 
cUuid) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CUuid = cUuid }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CUuid = cUuid }).ToList(); + await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresSpecialTypesCntRow { Cnt = batchSize, CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresSpecialTypesCnt(); AssertSingularEquals(expected, actual.Value); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs index ba19090d..faab83e5 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.cs @@ -14,14 +14,13 @@ public partial class NpgsqlDapperTester public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); - await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); - await QuerySql.TruncatePostgresUnstructuredTypes(); + await QuerySql.TruncatePostgresSpecialTypes(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 
4904081a..9fa5851a 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -400,14 +400,14 @@ void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgr [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] @@ -564,14 +564,14 @@ private static IEnumerable PostgresGuidDataTypesTestCases [TestCaseSource(nameof(PostgresGuidDataTypesTestCases))] public async Task TestPostgresGuidDataTypes(Guid? 
cUuid) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CUuid = cUuid }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CUuid = cUuid }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); } @@ -678,17 +678,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if 
(x.CJson.HasValue) @@ -713,14 +713,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -733,14 +733,14 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G [TestCase(null)] public async Task TestPostgresStringTypes(CEnum? 
cEnum) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CEnum = cEnum }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); } @@ -876,16 +876,16 @@ private static IEnumerable PostgresGuidCopyFromTestCases [TestCaseSource(nameof(PostgresGuidCopyFromTestCases))] public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? cUuid) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CUuid = cUuid }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CUuid = cUuid }).ToList(); + await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresSpecialTypesCntRow { Cnt = batchSize, CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresSpecialTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs 
b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs index bc7d976b..94ffb27c 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.cs @@ -14,14 +14,13 @@ public partial class NpgsqlTester public async Task EmptyTestsTable() { await QuerySql.TruncateAuthors(); - await QuerySql.TruncatePostgresTypes(); await QuerySql.TruncatePostgresNumericTypes(); await QuerySql.TruncatePostgresStringTypes(); await QuerySql.TruncatePostgresDateTimeTypes(); await QuerySql.TruncatePostgresGeoTypes(); await QuerySql.TruncatePostgresNetworkTypes(); await QuerySql.TruncatePostgresArrayTypes(); - await QuerySql.TruncatePostgresUnstructuredTypes(); + await QuerySql.TruncatePostgresSpecialTypes(); } } } \ No newline at end of file diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 89c31c17..221d1ed8 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -400,14 +400,14 @@ void AssertSingularEquals(QuerySql.GetPostgresFunctionsRow x, QuerySql.GetPostgr [Test] public void TestPostgresInvalidJson() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonStringOverride = "SOME INVALID JSON" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJsonpath = "SOME INVALID JSONPATH" })); } [Test] public void TestPostgresInvalidXml() { - Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresUnstructuredTypes(new 
QuerySql.InsertPostgresUnstructuredTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); + Assert.ThrowsAsync(async () => await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXmlStringOverride = "SOME INVALID XML" })); } [Test] @@ -564,14 +564,14 @@ private static IEnumerable PostgresGuidDataTypesTestCases [TestCaseSource(nameof(PostgresGuidDataTypesTestCases))] public async Task TestPostgresGuidDataTypes(Guid? cUuid) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CUuid = cUuid }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CUuid = cUuid }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); } @@ -678,17 +678,17 @@ public async Task TestPostgresJsonDataTypes(string cJson, string cJsonpath) JsonElement? 
cParsedJson = null; if (cJson != null) cParsedJson = JsonDocument.Parse(cJson).RootElement; - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CJson = cParsedJson, CJsonb = cParsedJson, CJsonStringOverride = cJson, CJsonpath = cJsonpath }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CJson.HasValue, Is.EqualTo(y.CJson.HasValue)); if (x.CJson.HasValue) @@ -713,14 +713,14 @@ public async Task TestPostgresXmlDataTypes(string cXml) parsedXml.LoadXml(cXml); } - await QuerySql.InsertPostgresUnstructuredTypes(new QuerySql.InsertPostgresUnstructuredTypesArgs { CXml = parsedXml }); - var expected = new QuerySql.GetPostgresUnstructuredTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CXml = parsedXml }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CXml = parsedXml }; - var actual = await QuerySql.GetPostgresUnstructuredTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.GetPostgresUnstructuredTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, 
QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CXml == null, Is.EqualTo(y.CXml == null)); if (x.CXml != null) @@ -733,14 +733,14 @@ void AssertSingularEquals(QuerySql.GetPostgresUnstructuredTypesRow x, QuerySql.G [TestCase(null)] public async Task TestPostgresStringTypes(CEnum? cEnum) { - await QuerySql.InsertPostgresTypes(new QuerySql.InsertPostgresTypesArgs { CEnum = cEnum }); - var expected = new QuerySql.GetPostgresTypesRow + await QuerySql.InsertPostgresSpecialTypes(new QuerySql.InsertPostgresSpecialTypesArgs { CEnum = cEnum }); + var expected = new QuerySql.GetPostgresSpecialTypesRow { CEnum = cEnum }; - var actual = await QuerySql.GetPostgresTypes(); + var actual = await QuerySql.GetPostgresSpecialTypes(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesRow x, QuerySql.GetPostgresTypesRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesRow x, QuerySql.GetPostgresSpecialTypesRow y) { Assert.That(x.CEnum, Is.EqualTo(y.CEnum)); } @@ -876,16 +876,16 @@ private static IEnumerable PostgresGuidCopyFromTestCases [TestCaseSource(nameof(PostgresGuidCopyFromTestCases))] public async Task TestPostgresGuidCopyFrom(int batchSize, Guid? 
cUuid) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresTypesBatchArgs { CUuid = cUuid }).ToList(); - await QuerySql.InsertPostgresTypesBatch(batchArgs); - var expected = new QuerySql.GetPostgresTypesCntRow + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresSpecialTypesBatchArgs { CUuid = cUuid }).ToList(); + await QuerySql.InsertPostgresSpecialTypesBatch(batchArgs); + var expected = new QuerySql.GetPostgresSpecialTypesCntRow { Cnt = batchSize, CUuid = cUuid }; - var actual = await QuerySql.GetPostgresTypesCnt(); + var actual = await QuerySql.GetPostgresSpecialTypesCnt(); AssertSingularEquals(expected, actual); - void AssertSingularEquals(QuerySql.GetPostgresTypesCntRow x, QuerySql.GetPostgresTypesCntRow y) + void AssertSingularEquals(QuerySql.GetPostgresSpecialTypesCntRow x, QuerySql.GetPostgresSpecialTypesCntRow y) { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CUuid, Is.EqualTo(y.CUuid)); diff --git a/examples/NpgsqlDapperExample/Models.cs b/examples/NpgsqlDapperExample/Models.cs index 44639a8e..923b9b7a 100644 --- a/examples/NpgsqlDapperExample/Models.cs +++ b/examples/NpgsqlDapperExample/Models.cs @@ -22,11 +22,6 @@ public class Book public required long AuthorId { get; init; } public string? Description { get; init; } }; -public class PostgresType -{ - public Guid? CUuid { get; init; } - public CEnum? CEnum { get; init; } -}; public class PostgresNumericType { public bool? CBoolean { get; init; } @@ -63,15 +58,6 @@ public class PostgresNetworkType public PhysicalAddress? CMacaddr { get; init; } public string? CMacaddr8 { get; init; } }; -public class PostgresUnstructuredType -{ - public JsonElement? CJson { get; init; } - public JsonElement? CJsonStringOverride { get; init; } - public JsonElement? CJsonb { get; init; } - public string? CJsonpath { get; init; } - public XmlDocument? CXml { get; init; } - public XmlDocument? 
CXmlStringOverride { get; init; } -}; public class PostgresArrayType { public byte[]? CBytea { get; init; } @@ -92,6 +78,17 @@ public class PostgresGeometricType public NpgsqlPolygon? CPolygon { get; init; } public NpgsqlCircle? CCircle { get; init; } }; +public class PostgresSpecialType +{ + public Guid? CUuid { get; init; } + public CEnum? CEnum { get; init; } + public JsonElement? CJson { get; init; } + public JsonElement? CJsonStringOverride { get; init; } + public JsonElement? CJsonb { get; init; } + public string? CJsonpath { get; init; } + public XmlDocument? CXml { get; init; } + public XmlDocument? CXmlStringOverride { get; init; } +}; public class ExtendedBio { public required string AuthorName { get; init; } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index c4f8d91f..f8118418 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -562,98 +562,6 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; - public class InsertPostgresTypesArgs - { - public Guid? CUuid { get; init; } - public CEnum? CEnum { get; init; } - }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : null); - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs - { - public Guid? CUuid { get; init; } - }; - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CUuid); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow - { - public Guid? CUuid { get; init; } - public CEnum? 
CEnum { get; init; } - }; - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); - } - - private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow - { - public Guid? CUuid { get; init; } - public required long Cnt { get; init; } - }; - public async Task GetPostgresTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); - } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { @@ -677,21 +585,6 @@ public class GetPostgresFunctionsRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = 
"TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); - } - private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; public class InsertPostgresNumericTypesArgs { @@ -1255,8 +1148,8 @@ public async Task InsertPostgresNetworkTypesBatch(List(); queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); @@ -1274,20 +1169,22 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType queryParams.Add("c_jsonpath", args.CJsonpath); queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); queryParams.Add("c_xml_string_override", args.CXmlStringOverride); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + await connection.ExecuteAsync(InsertPostgresSpecialTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresSpecialTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public class GetPostgresUnstructuredTypesRow + private const string GetPostgresSpecialTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_uuid, c_enum FROM postgres_special_types LIMIT 1"; + public class GetPostgresSpecialTypesRow { public JsonElement? CJson { get; init; } public string? CJsonStringOverride { get; init; } @@ -1295,36 +1192,85 @@ public class GetPostgresUnstructuredTypesRow public string? CJsonpath { get; init; } public XmlDocument? CXml { get; init; } public string? CXmlStringOverride { get; init; } + public Guid? CUuid { get; init; } + public CEnum? 
CEnum { get; init; } }; - public async Task GetPostgresUnstructuredTypes() + public async Task GetPostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string TruncatePostgresSpecialTypesSql = "TRUNCATE TABLE postgres_special_types"; + public async Task TruncatePostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + await connection.ExecuteAsync(TruncatePostgresSpecialTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresSpecialTypesSql, transaction: this.Transaction); + } + + private const string InsertPostgresSpecialTypesBatchSql = "COPY postgres_special_types (c_uuid) FROM 
STDIN (FORMAT BINARY)"; + public class InsertPostgresSpecialTypesBatchArgs + { + public Guid? CUuid { get; init; } + }; + public async Task InsertPostgresSpecialTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresSpecialTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CUuid); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresSpecialTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_special_types GROUP BY c_uuid LIMIT 1"; + public class GetPostgresSpecialTypesCntRow + { + public Guid? CUuid { get; init; } + public required long Cnt { get; init; } + }; + public async Task GetPostgresSpecialTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesCntSql, transaction: this.Transaction); } private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index ad7ede22..caac45f6 100644 --- a/examples/NpgsqlDapperExample/request.json +++ 
b/examples/NpgsqlDapperExample/request.json @@ -115,33 +115,6 @@ } ] }, - { - "rel": { - "name": "postgres_types" - }, - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } - } - ] - }, { "rel": { "name": "postgres_numeric_types" @@ -426,73 +399,6 @@ } ] }, - { - "rel": { - "name": "postgres_unstructured_types" - }, - "columns": [ - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - } - ] - }, { "rel": { "name": "postgres_array_types" @@ -662,6 +568,93 @@ } } ] + }, + { + "rel": { + "name": "postgres_special_types" + }, + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + } + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_json_string_override", + "length": -1, + 
"table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_jsonb", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonb" + } + }, + { + "name": "c_jsonpath", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonpath" + } + }, + { + "name": "c_xml", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + } + ] } ], "enums": [ @@ -33465,130 +33458,6 @@ "cmd": ":exec", "filename": "query.sql" }, - { - "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 2, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } - } - } - ], - "comments": [ - " Special types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, 
- "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, { "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", @@ -33624,12 +33493,6 @@ ], "filename": "query.sql" }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" - }, { "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", "name": "InsertPostgresNumericTypes", @@ -35169,8 +35032,8 @@ } }, { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", + "text": "\nINSERT INTO postgres_special_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\n)\nVALUES (\n $1::json, \n 
$2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml,\n $7,\n $8::c_enum\n)", + "name": "InsertPostgresSpecialTypes", "cmd": ":exec", "parameters": [ { @@ -35232,26 +35095,52 @@ "name": "xml" } } + }, + { + "number": 7, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 8, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } } ], "comments": [ - " Unstructured types " + " Special types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" } }, { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\nFROM postgres_special_types \nLIMIT 1", + "name": "GetPostgresSpecialTypes", "cmd": ":one", "columns": [ { "name": "c_json", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35262,7 +35151,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35273,7 +35162,7 @@ "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonb" @@ -35284,7 +35173,7 @@ "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonpath" @@ -35295,7 +35184,7 @@ "name": "c_xml", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": 
"postgres_special_types" }, "type": { "name": "xml" @@ -35306,22 +35195,98 @@ "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" }, "originalName": "c_xml_string_override" + }, + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", + "text": "TRUNCATE TABLE postgres_special_types", + "name": "TruncatePostgresSpecialTypes", "cmd": ":exec", "filename": "query.sql" }, + { + "text": "INSERT INTO postgres_special_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresSpecialTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_uuid", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_special_types" + } + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_special_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresSpecialTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n 
c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index 715eae5c..ab300659 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb▄ examples/NpgsqlDapperExamplecsharpЭ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":true}* -./dist/LocalRunnerъь public"мpublicГ +./dist/LocalRunner├ь public"ЕpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,10 +13,7 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextr -postgres_types- -c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enum╘ + description0         Rbooksbtext╘ postgres_numeric_typesD c_boolean0         Rpostgres_numeric_typesb pg_catalogbool? 
@@ -64,16 +61,7 @@ pg_cataloginterval c_macaddr0         Rpostgres_network_typesb macaddr= c_macaddr80         Rpostgres_network_typesb -macaddr8н -postgres_unstructured_types: -c_json0         Rpostgres_unstructured_typesbjsonJ -c_json_string_override0         Rpostgres_unstructured_typesbjson< -c_jsonb0         Rpostgres_unstructured_typesbjsonbB - -c_jsonpath0         Rpostgres_unstructured_typesb -jsonpath8 -c_xml0         Rpostgres_unstructured_typesbxmlH -c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ +macaddr8Х postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -93,7 +81,18 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" +c_circle0         Rpostgres_geometric_typesbcircle· +postgres_special_types5 +c_uuid0         Rpostgres_special_typesbuuid7 +c_enum0         Rpostgres_special_typesbc_enum5 +c_json0         Rpostgres_special_typesbjsonE +c_json_string_override0         Rpostgres_special_typesbjson7 +c_jsonb0         Rpostgres_special_typesbjsonb= + +c_jsonpath0         Rpostgres_special_typesb +jsonpath3 +c_xml0         Rpostgres_special_typesbxmlC +c_xml_string_override0         Rpostgres_special_typesbxml" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10325,43 +10324,7 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ -X -INSERT INTO postgres_types -( - c_uuid, - c_enum -) -VALUES ( - $1, - $2::c_enum -)InsertPostgresTypes:exec*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! 
-c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ -;INSERT INTO postgres_types -( - c_uuid -) -VALUES ( - $1 -)InsertPostgresTypesBatch :copyfrom*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ -:SELECT - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ -VSELECT - c_uuid, - COUNT(*) AS cnt -FROM postgres_types -GROUP BY - c_uuid -LIMIT 1GetPostgresTypesCnt:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sql№ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql№ ╥SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, @@ -10374,8 +10337,7 @@ CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +anyarray: query.sqlр э INSERT INTO postgres_numeric_types ( @@ -10708,16 +10670,18 @@ LIMIT 1GetPostgresNetworkTypesCnt:one"= ) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE c_inet0         R publicpostgres_network_typesbinetzc_inet*RN - c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ -∙ -INSERT INTO postgres_unstructured_types + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types▄ +д +INSERT INTO postgres_special_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override + c_xml_string_override, + c_uuid, + c_enum ) VALUES ( $1::json, @@ -10725,8 +10689,10 @@ VALUES ( $3::jsonb, $4::jsonpath, $5::xml, - $6::xml -)InsertPostgresUnstructuredTypes:exec* + $6::xml, + $7, + $8::c_enum +)InsertPostgresSpecialTypes:exec* c_json0         bjson*/+ c_json_string_override0         
bjson*! c_jsonb0         bjsonb*'# @@ -10734,26 +10700,49 @@ VALUES ( c_jsonpath0         b jsonpath* c_xml0         bxml*-) -c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн -ЪSELECT +c_xml_string_override0         bxml*KG +c_uuid0         8R publicpostgres_special_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_special_typesЭ +нSELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override -FROM postgres_unstructured_types -LIMIT 1GetPostgresUnstructuredTypes:one"B -c_json0         Rpostgres_unstructured_typesbjsonzc_json"b -c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E -c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N - -c_jsonpath0         Rpostgres_unstructured_typesb + c_xml_string_override, + c_uuid, + c_enum +FROM postgres_special_types +LIMIT 1GetPostgresSpecialTypes:one"= +c_json0         Rpostgres_special_typesbjsonzc_json"] +c_json_string_override0         Rpostgres_special_typesbjsonzc_json_string_override"@ +c_jsonb0         Rpostgres_special_typesbjsonbzc_jsonb"I + +c_jsonpath0         Rpostgres_special_typesb jsonpathz -c_jsonpath"? -c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ -c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla -*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +c_jsonpath": +c_xml0         Rpostgres_special_typesbxmlzc_xml"Z +c_xml_string_override0         Rpostgres_special_typesbxmlzc_xml_string_override"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid"? 
+c_enum0         Rpostgres_special_typesbc_enumzc_enum: query.sqlW +%TRUNCATE TABLE postgres_special_typesTruncatePostgresSpecialTypes:exec: query.sqlс +CINSERT INTO postgres_special_types +( + c_uuid +) +VALUES ( + $1 +)InsertPostgresSpecialTypesBatch :copyfrom*IE +c_uuid0         R publicpostgres_special_typesbuuidzc_uuid: query.sqlBpostgres_special_typesь +^SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_special_types +GROUP BY + c_uuid +LIMIT 1GetPostgresSpecialTypesCnt:one"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql▌ ╧ INSERT INTO postgres_array_types ( diff --git a/examples/NpgsqlDapperLegacyExample/Models.cs b/examples/NpgsqlDapperLegacyExample/Models.cs index a00482ec..31723a2e 100644 --- a/examples/NpgsqlDapperLegacyExample/Models.cs +++ b/examples/NpgsqlDapperLegacyExample/Models.cs @@ -23,11 +23,6 @@ public class Book public long AuthorId { get; set; } public string Description { get; set; } }; - public class PostgresType - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; public class PostgresNumericType { public bool? CBoolean { get; set; } @@ -64,15 +59,6 @@ public class PostgresNetworkType public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } }; - public class PostgresUnstructuredType - { - public JsonElement? CJson { get; set; } - public JsonElement? CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public XmlDocument CXmlStringOverride { get; set; } - }; public class PostgresArrayType { public byte[] CBytea { get; set; } @@ -93,6 +79,17 @@ public class PostgresGeometricType public NpgsqlPolygon? CPolygon { get; set; } public NpgsqlCircle? CCircle { get; set; } }; + public class PostgresSpecialType + { + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } + public JsonElement? CJson { get; set; } + public JsonElement? 
CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } + }; public class ExtendedBio { public string AuthorName { get; set; } diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 65a76398..9fb25e0b 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -563,98 +563,6 @@ public async Task TruncateExtendedBios() await this.Transaction.Connection.ExecuteAsync(TruncateExtendedBiosSql, transaction: this.Transaction); } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; - public class InsertPostgresTypesArgs - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) - { - var queryParams = new Dictionary(); - queryParams.Add("c_uuid", args.CUuid); - queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresTypesSql, queryParams); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresTypesSql, queryParams, transaction: this.Transaction); - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs - { - public Guid? 
CUuid { get; set; } - }; - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CUuid); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesSql, transaction: this.Transaction); - } - - private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow - { - public Guid? 
CUuid { get; set; } - public long Cnt { get; set; } - }; - public async Task GetPostgresTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql); - return result; - } - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresTypesCntSql, transaction: this.Transaction); - } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { @@ -678,21 +586,6 @@ public async Task GetPostgresFunctions() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresFunctionsSql, transaction: this.Transaction); } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresTypesSql); - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(TruncatePostgresTypesSql, transaction: this.Transaction); - } - private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, 
@c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; public class InsertPostgresNumericTypesArgs { @@ -1256,8 +1149,8 @@ public async Task InsertPostgresNetworkTypesBatch(List(); queryParams.Add("c_json", args.CJson.HasValue ? args.CJson.Value.GetRawText() : null); @@ -1275,20 +1170,22 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType queryParams.Add("c_jsonpath", args.CJsonpath); queryParams.Add("c_xml", args.CXml != null ? args.CXml.OuterXml : null); queryParams.Add("c_xml_string_override", args.CXmlStringOverride); + queryParams.Add("c_uuid", args.CUuid); + queryParams.Add("c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : null); if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams); + await connection.ExecuteAsync(InsertPostgresSpecialTypesSql, queryParams); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await this.Transaction.Connection.ExecuteAsync(InsertPostgresUnstructuredTypesSql, queryParams, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(InsertPostgresSpecialTypesSql, queryParams, transaction: this.Transaction); } - private const string GetPostgresUnstructuredTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override FROM postgres_unstructured_types LIMIT 1"; - public class GetPostgresUnstructuredTypesRow + private const string GetPostgresSpecialTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_uuid, c_enum FROM postgres_special_types LIMIT 1"; + public class GetPostgresSpecialTypesRow { public JsonElement? 
CJson { get; set; } public string CJsonStringOverride { get; set; } @@ -1296,36 +1193,85 @@ public class GetPostgresUnstructuredTypesRow public string CJsonpath { get; set; } public XmlDocument CXml { get; set; } public string CXmlStringOverride { get; set; } + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } }; - public async Task GetPostgresUnstructuredTypes() + public async Task GetPostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) { - var result = await connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql); + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesSql); return result; } } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresUnstructuredTypesSql, transaction: this.Transaction); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesSql, transaction: this.Transaction); } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string TruncatePostgresSpecialTypesSql = "TRUNCATE TABLE postgres_special_types"; + public async Task TruncatePostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = new NpgsqlConnection(ConnectionString)) - await connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql); + await connection.ExecuteAsync(TruncatePostgresSpecialTypesSql); return; } if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) throw new InvalidOperationException("Transaction is provided, but its connection is null."); - await 
this.Transaction.Connection.ExecuteAsync(TruncatePostgresUnstructuredTypesSql, transaction: this.Transaction); + await this.Transaction.Connection.ExecuteAsync(TruncatePostgresSpecialTypesSql, transaction: this.Transaction); + } + + private const string InsertPostgresSpecialTypesBatchSql = "COPY postgres_special_types (c_uuid) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresSpecialTypesBatchArgs + { + public Guid? CUuid { get; set; } + }; + public async Task InsertPostgresSpecialTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresSpecialTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CUuid); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresSpecialTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_special_types GROUP BY c_uuid LIMIT 1"; + public class GetPostgresSpecialTypesCntRow + { + public Guid? 
CUuid { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresSpecialTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + var result = await connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesCntSql); + return result; + } + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresSpecialTypesCntSql, transaction: this.Transaction); } private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 0b53a349..9db0a4eb 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -115,33 +115,6 @@ } ] }, - { - "rel": { - "name": "postgres_types" - }, - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } - } - ] - }, { "rel": { "name": "postgres_numeric_types" @@ -426,73 +399,6 @@ } ] }, - { - "rel": { - "name": "postgres_unstructured_types" - }, - "columns": [ - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - 
"type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - } - ] - }, { "rel": { "name": "postgres_array_types" @@ -662,6 +568,93 @@ } } ] + }, + { + "rel": { + "name": "postgres_special_types" + }, + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + } + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_json_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_jsonb", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonb" + } + }, + { + "name": "c_jsonpath", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonpath" + } + }, + { + "name": "c_xml", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + } + ] } ], "enums": [ @@ -33465,130 +33458,6 @@ "cmd": ":exec", "filename": "query.sql" }, - { - "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n 
c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 2, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } - } - } - ], - "comments": [ - " Special types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, { "text": 
"SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", @@ -33624,12 +33493,6 @@ ], "filename": "query.sql" }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" - }, { "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", "name": "InsertPostgresNumericTypes", @@ -35169,8 +35032,8 @@ } }, { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", + "text": "\nINSERT INTO postgres_special_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml,\n $7,\n $8::c_enum\n)", + "name": "InsertPostgresSpecialTypes", "cmd": ":exec", "parameters": [ { @@ -35232,26 +35095,52 @@ "name": "xml" } } + }, + { + "number": 7, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 8, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } } ], "comments": [ - " Unstructured types " + " Special types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" } }, { - "text": "SELECT\n c_json,\n 
c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\nFROM postgres_special_types \nLIMIT 1", + "name": "GetPostgresSpecialTypes", "cmd": ":one", "columns": [ { "name": "c_json", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35262,7 +35151,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35273,7 +35162,7 @@ "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonb" @@ -35284,7 +35173,7 @@ "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonpath" @@ -35295,7 +35184,7 @@ "name": "c_xml", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" @@ -35306,22 +35195,98 @@ "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" }, "originalName": "c_xml_string_override" + }, + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", + "text": "TRUNCATE TABLE postgres_special_types", + 
"name": "TruncatePostgresSpecialTypes", "cmd": ":exec", "filename": "query.sql" }, + { + "text": "INSERT INTO postgres_special_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresSpecialTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_uuid", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_special_types" + } + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_special_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresSpecialTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index 86c379b9..d8e8b880 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbЁ 
"examples/NpgsqlDapperLegacyExamplecsharpл{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlDapperLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":true}* -./dist/LocalRunnerъь public"мpublicГ +./dist/LocalRunner├ь public"ЕpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,10 +13,7 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextr -postgres_types- -c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enum╘ + description0         Rbooksbtext╘ postgres_numeric_typesD c_boolean0         Rpostgres_numeric_typesb pg_catalogbool? 
@@ -64,16 +61,7 @@ pg_cataloginterval c_macaddr0         Rpostgres_network_typesb macaddr= c_macaddr80         Rpostgres_network_typesb -macaddr8н -postgres_unstructured_types: -c_json0         Rpostgres_unstructured_typesbjsonJ -c_json_string_override0         Rpostgres_unstructured_typesbjson< -c_jsonb0         Rpostgres_unstructured_typesbjsonbB - -c_jsonpath0         Rpostgres_unstructured_typesb -jsonpath8 -c_xml0         Rpostgres_unstructured_typesbxmlH -c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ +macaddr8Х postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -93,7 +81,18 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" +c_circle0         Rpostgres_geometric_typesbcircle· +postgres_special_types5 +c_uuid0         Rpostgres_special_typesbuuid7 +c_enum0         Rpostgres_special_typesbc_enum5 +c_json0         Rpostgres_special_typesbjsonE +c_json_string_override0         Rpostgres_special_typesbjson7 +c_jsonb0         Rpostgres_special_typesbjsonb= + +c_jsonpath0         Rpostgres_special_typesb +jsonpath3 +c_xml0         Rpostgres_special_typesbxmlC +c_xml_string_override0         Rpostgres_special_typesbxml" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10325,43 +10324,7 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ -X -INSERT INTO postgres_types -( - c_uuid, - c_enum -) -VALUES ( - $1, - $2::c_enum -)InsertPostgresTypes:exec*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! 
-c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ -;INSERT INTO postgres_types -( - c_uuid -) -VALUES ( - $1 -)InsertPostgresTypesBatch :copyfrom*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ -:SELECT - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ -VSELECT - c_uuid, - COUNT(*) AS cnt -FROM postgres_types -GROUP BY - c_uuid -LIMIT 1GetPostgresTypesCnt:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sql№ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql№ ╥SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, @@ -10374,8 +10337,7 @@ CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +anyarray: query.sqlр э INSERT INTO postgres_numeric_types ( @@ -10708,16 +10670,18 @@ LIMIT 1GetPostgresNetworkTypesCnt:one"= ) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE c_inet0         R publicpostgres_network_typesbinetzc_inet*RN - c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ -∙ -INSERT INTO postgres_unstructured_types + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types▄ +д +INSERT INTO postgres_special_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override + c_xml_string_override, + c_uuid, + c_enum ) VALUES ( $1::json, @@ -10725,8 +10689,10 @@ VALUES ( $3::jsonb, $4::jsonpath, $5::xml, - $6::xml -)InsertPostgresUnstructuredTypes:exec* + $6::xml, + $7, + $8::c_enum +)InsertPostgresSpecialTypes:exec* c_json0         bjson*/+ c_json_string_override0         
bjson*! c_jsonb0         bjsonb*'# @@ -10734,26 +10700,49 @@ VALUES ( c_jsonpath0         b jsonpath* c_xml0         bxml*-) -c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн -ЪSELECT +c_xml_string_override0         bxml*KG +c_uuid0         8R publicpostgres_special_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_special_typesЭ +нSELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override -FROM postgres_unstructured_types -LIMIT 1GetPostgresUnstructuredTypes:one"B -c_json0         Rpostgres_unstructured_typesbjsonzc_json"b -c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E -c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N - -c_jsonpath0         Rpostgres_unstructured_typesb + c_xml_string_override, + c_uuid, + c_enum +FROM postgres_special_types +LIMIT 1GetPostgresSpecialTypes:one"= +c_json0         Rpostgres_special_typesbjsonzc_json"] +c_json_string_override0         Rpostgres_special_typesbjsonzc_json_string_override"@ +c_jsonb0         Rpostgres_special_typesbjsonbzc_jsonb"I + +c_jsonpath0         Rpostgres_special_typesb jsonpathz -c_jsonpath"? -c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ -c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla -*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +c_jsonpath": +c_xml0         Rpostgres_special_typesbxmlzc_xml"Z +c_xml_string_override0         Rpostgres_special_typesbxmlzc_xml_string_override"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid"? 
+c_enum0         Rpostgres_special_typesbc_enumzc_enum: query.sqlW +%TRUNCATE TABLE postgres_special_typesTruncatePostgresSpecialTypes:exec: query.sqlс +CINSERT INTO postgres_special_types +( + c_uuid +) +VALUES ( + $1 +)InsertPostgresSpecialTypesBatch :copyfrom*IE +c_uuid0         R publicpostgres_special_typesbuuidzc_uuid: query.sqlBpostgres_special_typesь +^SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_special_types +GROUP BY + c_uuid +LIMIT 1GetPostgresSpecialTypesCnt:one"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql▌ ╧ INSERT INTO postgres_array_types ( diff --git a/examples/NpgsqlExample/Models.cs b/examples/NpgsqlExample/Models.cs index f5522631..a30721ec 100644 --- a/examples/NpgsqlExample/Models.cs +++ b/examples/NpgsqlExample/Models.cs @@ -11,14 +11,13 @@ namespace NpgsqlExampleGen; public readonly record struct Author(long Id, string Name, string? Bio); public readonly record struct Book(Guid Id, string Name, long AuthorId, string? Description); -public readonly record struct PostgresType(Guid? CUuid, CEnum? CEnum); public readonly record struct PostgresNumericType(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? CMoney); public readonly record struct PostgresStringType(string? CChar, string? CVarchar, string? CCharacterVarying, string? CBpchar, string? CText); public readonly record struct PostgresDatetimeType(DateTime? CDate, TimeSpan? CTime, DateTime? CTimestamp, DateTime? CTimestampWithTz, TimeSpan? CInterval); public readonly record struct PostgresNetworkType(NpgsqlCidr? CCidr, IPAddress? CInet, PhysicalAddress? CMacaddr, string? CMacaddr8); -public readonly record struct PostgresUnstructuredType(JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? 
CXmlStringOverride); public readonly record struct PostgresArrayType(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); public readonly record struct PostgresGeometricType(NpgsqlPoint? CPoint, NpgsqlLine? CLine, NpgsqlLSeg? CLseg, NpgsqlBox? CBox, NpgsqlPath? CPath, NpgsqlPolygon? CPolygon, NpgsqlCircle? CCircle); +public readonly record struct PostgresSpecialType(Guid? CUuid, CEnum? CEnum, JsonElement? CJson, JsonElement? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, XmlDocument? CXmlStringOverride); public readonly record struct ExtendedBio(string AuthorName, string Name, ExtendedBioType? BioType); public enum CEnum { diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index 09def850..a0fc16be 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -743,157 +743,6 @@ public async Task TruncateExtendedBios() } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; - public readonly record struct InsertPostgresTypesArgs(Guid? CUuid, CEnum? CEnum); - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) - { - using (var command = connection.CreateCommand(InsertPostgresTypesSql)) - { - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertPostgresTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresTypesBatchArgs(Guid? CUuid); - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; - public readonly record struct GetPostgresTypesRow(Guid? CUuid, CEnum? CEnum); - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) - { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesRow - { - CUuid = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), - CEnum = reader.IsDBNull(1) ? null : reader.GetString(1).ToCEnum() - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetPostgresTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesRow - { - CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - CEnum = reader.IsDBNull(1) ? null : reader.GetString(1).ToCEnum() - }; - } - } - } - - return null; - } - - private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; - public readonly record struct GetPostgresTypesCntRow(Guid? CUuid, long Cnt); - public async Task GetPostgresTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) - { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CUuid = reader.IsDBNull(0) ? 
null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetPostgresTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } - } - } - - return null; - } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public readonly record struct GetPostgresFunctionsRow(int? MaxInteger, string? 
MaxVarchar, DateTime MaxTimestamp); public async Task GetPostgresFunctions() @@ -945,32 +794,6 @@ public async Task InsertPostgresTypesBatch(List ar return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString!)) - { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncatePostgresTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - private const string InsertPostgresNumericTypesSql = " INSERT INTO postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; public readonly record struct InsertPostgresNumericTypesArgs(bool? CBoolean, byte[]? CBit, short? CSmallint, int? CInteger, long? CBigint, decimal? CDecimal, decimal? CNumeric, float? CReal, double? CDoublePrecision, decimal? 
CMoney); public async Task InsertPostgresNumericTypes(InsertPostgresNumericTypesArgs args) @@ -1856,15 +1679,15 @@ public async Task InsertPostgresNetworkTypesBatch(List GetPostgresUnstructuredTypes() + private const string GetPostgresSpecialTypesSql = "SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, c_xml_string_override, c_uuid, c_enum FROM postgres_special_types LIMIT 1"; + public readonly record struct GetPostgresSpecialTypesRow(JsonElement? CJson, string? CJsonStringOverride, JsonElement? CJsonb, string? CJsonpath, XmlDocument? CXml, string? CXmlStringOverride, Guid? CUuid, CEnum? CEnum); + public async Task GetPostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetPostgresSpecialTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresSpecialTypesRow { CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), @@ -1921,7 +1748,9 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5), + CUuid = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CEnum = reader.IsDBNull(7) ? 
null : reader.GetString(7).ToCEnum() }; } } @@ -1935,13 +1764,13 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetPostgresSpecialTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresSpecialTypesRow { CJson = reader.IsDBNull(0) ? null : JsonSerializer.Deserialize(reader.GetString(0)), CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), @@ -1953,7 +1782,9 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5), + CUuid = reader.IsDBNull(6) ? null : reader.GetFieldValue(6), + CEnum = reader.IsDBNull(7) ? 
null : reader.GetString(7).ToCEnum() }; } } @@ -1962,14 +1793,14 @@ public async Task InsertPostgresUnstructuredTypes(InsertPostgresUnstructuredType return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string TruncatePostgresSpecialTypesSql = "TRUNCATE TABLE postgres_special_types"; + public async Task TruncatePostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString!)) { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresSpecialTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -1982,12 +1813,83 @@ public async Task TruncatePostgresUnstructuredTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresUnstructuredTypesSql; + command.CommandText = TruncatePostgresSpecialTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } + private const string InsertPostgresSpecialTypesBatchSql = "COPY postgres_special_types (c_uuid) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresSpecialTypesBatchArgs(Guid? CUuid); + public async Task InsertPostgresSpecialTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresSpecialTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CUuid ?? 
(object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresSpecialTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_special_types GROUP BY c_uuid LIMIT 1"; + public readonly record struct GetPostgresSpecialTypesCntRow(Guid? CUuid, long Cnt); + public async Task GetPostgresSpecialTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString!)) + { + using (var command = connection.CreateCommand(GetPostgresSpecialTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresSpecialTypesCntRow + { + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresSpecialTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresSpecialTypesCntRow + { + CUuid = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } + } + } + + return null; + } + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public readonly record struct InsertPostgresArrayTypesArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? 
CDecimalArray, DateTime[]? CDateArray, DateTime[]? CTimestampArray); public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index 5236f9a0..e3ef486e 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -115,33 +115,6 @@ } ] }, - { - "rel": { - "name": "postgres_types" - }, - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } - } - ] - }, { "rel": { "name": "postgres_numeric_types" @@ -426,73 +399,6 @@ } ] }, - { - "rel": { - "name": "postgres_unstructured_types" - }, - "columns": [ - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - } - ] - }, { "rel": { "name": "postgres_array_types" @@ -662,6 +568,93 @@ } } ] + }, + { + "rel": { + "name": "postgres_special_types" + }, + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + } + }, + { + "name": 
"c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + } + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_json_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_jsonb", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonb" + } + }, + { + "name": "c_jsonpath", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonpath" + } + }, + { + "name": "c_xml", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + } + ] } ], "enums": [ @@ -33465,130 +33458,6 @@ "cmd": ":exec", "filename": "query.sql" }, - { - "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 2, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } - } - } - ], - "comments": [ - " Special types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" 
- }, - "originalName": "c_uuid" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, { "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", @@ -33624,12 +33493,6 @@ ], "filename": "query.sql" }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" - }, { "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", "name": "InsertPostgresNumericTypes", @@ -35169,8 +35032,8 @@ } }, { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n 
$2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", + "text": "\nINSERT INTO postgres_special_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml,\n $7,\n $8::c_enum\n)", + "name": "InsertPostgresSpecialTypes", "cmd": ":exec", "parameters": [ { @@ -35232,26 +35095,52 @@ "name": "xml" } } + }, + { + "number": 7, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 8, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } } ], "comments": [ - " Unstructured types " + " Special types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" } }, { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\nFROM postgres_special_types \nLIMIT 1", + "name": "GetPostgresSpecialTypes", "cmd": ":one", "columns": [ { "name": "c_json", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35262,7 +35151,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35273,7 +35162,7 @@ "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonb" 
@@ -35284,7 +35173,7 @@ "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonpath" @@ -35295,7 +35184,7 @@ "name": "c_xml", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" @@ -35306,22 +35195,98 @@ "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" }, "originalName": "c_xml_string_override" + }, + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", + "text": "TRUNCATE TABLE postgres_special_types", + "name": "TruncatePostgresSpecialTypes", "cmd": ":exec", "filename": "query.sql" }, + { + "text": "INSERT INTO postgres_special_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresSpecialTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_uuid", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_special_types" + } + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_special_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresSpecialTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + 
"name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 3a216096..7fbb6982 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlb╤ examples/NpgsqlExamplecsharpШ{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"net8.0","useDapper":false}* -./dist/LocalRunnerъь public"мpublicГ +./dist/LocalRunner├ь public"ЕpublicГ authors) id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,10 +13,7 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextr -postgres_types- -c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enum╘ + description0         
Rbooksbtext╘ postgres_numeric_typesD c_boolean0         Rpostgres_numeric_typesb pg_catalogbool? @@ -64,16 +61,7 @@ pg_cataloginterval c_macaddr0         Rpostgres_network_typesb macaddr= c_macaddr80         Rpostgres_network_typesb -macaddr8н -postgres_unstructured_types: -c_json0         Rpostgres_unstructured_typesbjsonJ -c_json_string_override0         Rpostgres_unstructured_typesbjson< -c_jsonb0         Rpostgres_unstructured_typesbjsonbB - -c_jsonpath0         Rpostgres_unstructured_typesb -jsonpath8 -c_xml0         Rpostgres_unstructured_typesbxmlH -c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ +macaddr8Х postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -93,7 +81,18 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" +c_circle0         Rpostgres_geometric_typesbcircle· +postgres_special_types5 +c_uuid0         Rpostgres_special_typesbuuid7 +c_enum0         Rpostgres_special_typesbc_enum5 +c_json0         Rpostgres_special_typesbjsonE +c_json_string_override0         Rpostgres_special_typesbjson7 +c_jsonb0         Rpostgres_special_typesbjsonb= + +c_jsonpath0         Rpostgres_special_typesb +jsonpath3 +c_xml0         Rpostgres_special_typesbxmlC +c_xml_string_override0         Rpostgres_special_typesbxml" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10325,43 +10324,7 @@ pg_catalogvarcharz author_name"B pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ -X -INSERT INTO postgres_types -( - c_uuid, - c_enum -) -VALUES ( - $1, - $2::c_enum -)InsertPostgresTypes:exec*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! 
-c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ -;INSERT INTO postgres_types -( - c_uuid -) -VALUES ( - $1 -)InsertPostgresTypesBatch :copyfrom*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ -:SELECT - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ -VSELECT - c_uuid, - COUNT(*) AS cnt -FROM postgres_types -GROUP BY - c_uuid -LIMIT 1GetPostgresTypesCnt:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sql№ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql№ ╥SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, @@ -10374,8 +10337,7 @@ CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +anyarray: query.sqlр э INSERT INTO postgres_numeric_types ( @@ -10708,16 +10670,18 @@ LIMIT 1GetPostgresNetworkTypesCnt:one"= ) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE c_inet0         R publicpostgres_network_typesbinetzc_inet*RN - c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ -∙ -INSERT INTO postgres_unstructured_types + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types▄ +д +INSERT INTO postgres_special_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override + c_xml_string_override, + c_uuid, + c_enum ) VALUES ( $1::json, @@ -10725,8 +10689,10 @@ VALUES ( $3::jsonb, $4::jsonpath, $5::xml, - $6::xml -)InsertPostgresUnstructuredTypes:exec* + $6::xml, + $7, + $8::c_enum +)InsertPostgresSpecialTypes:exec* c_json0         bjson*/+ c_json_string_override0         
bjson*! c_jsonb0         bjsonb*'# @@ -10734,26 +10700,49 @@ VALUES ( c_jsonpath0         b jsonpath* c_xml0         bxml*-) -c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн -ЪSELECT +c_xml_string_override0         bxml*KG +c_uuid0         8R publicpostgres_special_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_special_typesЭ +нSELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override -FROM postgres_unstructured_types -LIMIT 1GetPostgresUnstructuredTypes:one"B -c_json0         Rpostgres_unstructured_typesbjsonzc_json"b -c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E -c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N - -c_jsonpath0         Rpostgres_unstructured_typesb + c_xml_string_override, + c_uuid, + c_enum +FROM postgres_special_types +LIMIT 1GetPostgresSpecialTypes:one"= +c_json0         Rpostgres_special_typesbjsonzc_json"] +c_json_string_override0         Rpostgres_special_typesbjsonzc_json_string_override"@ +c_jsonb0         Rpostgres_special_typesbjsonbzc_jsonb"I + +c_jsonpath0         Rpostgres_special_typesb jsonpathz -c_jsonpath"? -c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ -c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla -*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +c_jsonpath": +c_xml0         Rpostgres_special_typesbxmlzc_xml"Z +c_xml_string_override0         Rpostgres_special_typesbxmlzc_xml_string_override"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid"? 
+c_enum0         Rpostgres_special_typesbc_enumzc_enum: query.sqlW +%TRUNCATE TABLE postgres_special_typesTruncatePostgresSpecialTypes:exec: query.sqlс +CINSERT INTO postgres_special_types +( + c_uuid +) +VALUES ( + $1 +)InsertPostgresSpecialTypesBatch :copyfrom*IE +c_uuid0         R publicpostgres_special_typesbuuidzc_uuid: query.sqlBpostgres_special_typesь +^SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_special_types +GROUP BY + c_uuid +LIMIT 1GetPostgresSpecialTypesCnt:one"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql▌ ╧ INSERT INTO postgres_array_types ( diff --git a/examples/NpgsqlLegacyExample/Models.cs b/examples/NpgsqlLegacyExample/Models.cs index 01b6408b..6482da2a 100644 --- a/examples/NpgsqlLegacyExample/Models.cs +++ b/examples/NpgsqlLegacyExample/Models.cs @@ -23,11 +23,6 @@ public class Book public long AuthorId { get; set; } public string Description { get; set; } }; - public class PostgresType - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; public class PostgresNumericType { public bool? CBoolean { get; set; } @@ -64,15 +59,6 @@ public class PostgresNetworkType public PhysicalAddress CMacaddr { get; set; } public string CMacaddr8 { get; set; } }; - public class PostgresUnstructuredType - { - public JsonElement? CJson { get; set; } - public JsonElement? CJsonStringOverride { get; set; } - public JsonElement? CJsonb { get; set; } - public string CJsonpath { get; set; } - public XmlDocument CXml { get; set; } - public XmlDocument CXmlStringOverride { get; set; } - }; public class PostgresArrayType { public byte[] CBytea { get; set; } @@ -93,6 +79,17 @@ public class PostgresGeometricType public NpgsqlPolygon? CPolygon { get; set; } public NpgsqlCircle? CCircle { get; set; } }; + public class PostgresSpecialType + { + public Guid? CUuid { get; set; } + public CEnum? CEnum { get; set; } + public JsonElement? CJson { get; set; } + public JsonElement? 
CJsonStringOverride { get; set; } + public JsonElement? CJsonb { get; set; } + public string CJsonpath { get; set; } + public XmlDocument CXml { get; set; } + public XmlDocument CXmlStringOverride { get; set; } + }; public class ExtendedBio { public string AuthorName { get; set; } diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index ed987e4e..3ee1fd51 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -854,172 +854,6 @@ public async Task TruncateExtendedBios() } } - private const string InsertPostgresTypesSql = " INSERT INTO postgres_types ( c_uuid, c_enum ) VALUES ( @c_uuid, @c_enum::c_enum )"; - public class InsertPostgresTypesArgs - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; - public async Task InsertPostgresTypes(InsertPostgresTypesArgs args) - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(InsertPostgresTypesSql)) - { - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = InsertPostgresTypesSql; - command.Transaction = this.Transaction; - command.Parameters.AddWithValue("@c_uuid", args.CUuid ?? (object)DBNull.Value); - command.Parameters.AddWithValue("@c_enum", args.CEnum != null ? 
args.CEnum.Value.Stringify() : (object)DBNull.Value); - await command.ExecuteNonQueryAsync(); - } - } - - private const string InsertPostgresTypesBatchSql = "COPY postgres_types (c_uuid) FROM STDIN (FORMAT BINARY)"; - public class InsertPostgresTypesBatchArgs - { - public Guid? CUuid { get; set; } - }; - public async Task InsertPostgresTypesBatch(List args) - { - using (var connection = new NpgsqlConnection(ConnectionString)) - { - await connection.OpenAsync(); - using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresTypesBatchSql)) - { - foreach (var row in args) - { - await writer.StartRowAsync(); - await writer.WriteAsync(row.CUuid ?? (object)DBNull.Value); - } - - await writer.CompleteAsync(); - } - - await connection.CloseAsync(); - } - } - - private const string GetPostgresTypesSql = "SELECT c_uuid, c_enum FROM postgres_types LIMIT 1"; - public class GetPostgresTypesRow - { - public Guid? CUuid { get; set; } - public CEnum? CEnum { get; set; } - }; - public async Task GetPostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetPostgresTypesSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesRow - { - CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), - CEnum = reader.IsDBNull(1) ? (CEnum? 
)null : reader.GetString(1).ToCEnum() - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetPostgresTypesSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesRow - { - CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), - CEnum = reader.IsDBNull(1) ? (CEnum? )null : reader.GetString(1).ToCEnum() - }; - } - } - } - - return null; - } - - private const string GetPostgresTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_types GROUP BY c_uuid LIMIT 1"; - public class GetPostgresTypesCntRow - { - public Guid? CUuid { get; set; } - public long Cnt { get; set; } - }; - public async Task GetPostgresTypesCnt() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(GetPostgresTypesCntSql)) - { - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CUuid = reader.IsDBNull(0) ? (Guid? 
)null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } - } - } - } - - return null; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = GetPostgresTypesCntSql; - command.Transaction = this.Transaction; - using (var reader = await command.ExecuteReaderAsync()) - { - if (await reader.ReadAsync()) - { - return new GetPostgresTypesCntRow - { - CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) - }; - } - } - } - - return null; - } - private const string GetPostgresFunctionsSql = "SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, MAX(c_timestamp) AS max_timestamp FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN postgres_string_types"; public class GetPostgresFunctionsRow { @@ -1076,32 +910,6 @@ public async Task GetPostgresFunctions() return null; } - private const string TruncatePostgresTypesSql = "TRUNCATE TABLE postgres_types"; - public async Task TruncatePostgresTypes() - { - if (this.Transaction == null) - { - using (var connection = NpgsqlDataSource.Create(ConnectionString)) - { - using (var command = connection.CreateCommand(TruncatePostgresTypesSql)) - { - await command.ExecuteNonQueryAsync(); - } - } - - return; - } - - if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) - throw new InvalidOperationException("Transaction is provided, but its connection is null."); - using (var command = this.Transaction.Connection.CreateCommand()) - { - command.CommandText = TruncatePostgresTypesSql; - command.Transaction = this.Transaction; - await command.ExecuteNonQueryAsync(); - } - } - private const string InsertPostgresNumericTypesSql = " INSERT INTO 
postgres_numeric_types ( c_boolean, c_bit, c_smallint, c_integer, c_bigint, c_decimal, c_numeric, c_real, c_double_precision, c_money ) VALUES (@c_boolean, @c_bit, @c_smallint, @c_integer, @c_bigint, @c_decimal, @c_numeric, @c_real, @c_double_precision, @c_money)"; public class InsertPostgresNumericTypesArgs { @@ -2126,8 +1934,8 @@ public async Task InsertPostgresNetworkTypesBatch(List GetPostgresUnstructuredTypes() + public async Task GetPostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(GetPostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(GetPostgresSpecialTypesSql)) { using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresSpecialTypesRow { CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), @@ -2207,7 +2023,9 @@ public async Task GetPostgresUnstructuredTypes( xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5), + CUuid = reader.IsDBNull(6) ? (Guid? )null : reader.GetFieldValue(6), + CEnum = reader.IsDBNull(7) ? (CEnum? 
)null : reader.GetString(7).ToCEnum() }; } } @@ -2221,13 +2039,13 @@ public async Task GetPostgresUnstructuredTypes( throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = GetPostgresUnstructuredTypesSql; + command.CommandText = GetPostgresSpecialTypesSql; command.Transaction = this.Transaction; using (var reader = await command.ExecuteReaderAsync()) { if (await reader.ReadAsync()) { - return new GetPostgresUnstructuredTypesRow + return new GetPostgresSpecialTypesRow { CJson = reader.IsDBNull(0) ? (JsonElement? )null : JsonSerializer.Deserialize(reader.GetString(0)), CJsonStringOverride = reader.IsDBNull(1) ? null : reader.GetString(1), @@ -2239,7 +2057,9 @@ public async Task GetPostgresUnstructuredTypes( xmlDoc.LoadXml(r.GetString(o)); return xmlDoc; }))(reader, 4), - CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5) + CXmlStringOverride = reader.IsDBNull(5) ? null : reader.GetString(5), + CUuid = reader.IsDBNull(6) ? (Guid? )null : reader.GetFieldValue(6), + CEnum = reader.IsDBNull(7) ? (CEnum? 
)null : reader.GetString(7).ToCEnum() }; } } @@ -2248,14 +2068,14 @@ public async Task GetPostgresUnstructuredTypes( return null; } - private const string TruncatePostgresUnstructuredTypesSql = "TRUNCATE TABLE postgres_unstructured_types"; - public async Task TruncatePostgresUnstructuredTypes() + private const string TruncatePostgresSpecialTypesSql = "TRUNCATE TABLE postgres_special_types"; + public async Task TruncatePostgresSpecialTypes() { if (this.Transaction == null) { using (var connection = NpgsqlDataSource.Create(ConnectionString)) { - using (var command = connection.CreateCommand(TruncatePostgresUnstructuredTypesSql)) + using (var command = connection.CreateCommand(TruncatePostgresSpecialTypesSql)) { await command.ExecuteNonQueryAsync(); } @@ -2268,12 +2088,90 @@ public async Task TruncatePostgresUnstructuredTypes() throw new InvalidOperationException("Transaction is provided, but its connection is null."); using (var command = this.Transaction.Connection.CreateCommand()) { - command.CommandText = TruncatePostgresUnstructuredTypesSql; + command.CommandText = TruncatePostgresSpecialTypesSql; command.Transaction = this.Transaction; await command.ExecuteNonQueryAsync(); } } + private const string InsertPostgresSpecialTypesBatchSql = "COPY postgres_special_types (c_uuid) FROM STDIN (FORMAT BINARY)"; + public class InsertPostgresSpecialTypesBatchArgs + { + public Guid? CUuid { get; set; } + }; + public async Task InsertPostgresSpecialTypesBatch(List args) + { + using (var connection = new NpgsqlConnection(ConnectionString)) + { + await connection.OpenAsync(); + using (var writer = await connection.BeginBinaryImportAsync(InsertPostgresSpecialTypesBatchSql)) + { + foreach (var row in args) + { + await writer.StartRowAsync(); + await writer.WriteAsync(row.CUuid ?? 
(object)DBNull.Value); + } + + await writer.CompleteAsync(); + } + + await connection.CloseAsync(); + } + } + + private const string GetPostgresSpecialTypesCntSql = "SELECT c_uuid, COUNT(*) AS cnt FROM postgres_special_types GROUP BY c_uuid LIMIT 1"; + public class GetPostgresSpecialTypesCntRow + { + public Guid? CUuid { get; set; } + public long Cnt { get; set; } + }; + public async Task GetPostgresSpecialTypesCnt() + { + if (this.Transaction == null) + { + using (var connection = NpgsqlDataSource.Create(ConnectionString)) + { + using (var command = connection.CreateCommand(GetPostgresSpecialTypesCntSql)) + { + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresSpecialTypesCntRow + { + CUuid = reader.IsDBNull(0) ? (Guid? )null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } + } + } + } + + return null; + } + + if (this.Transaction?.Connection == null || this.Transaction?.Connection.State != System.Data.ConnectionState.Open) + throw new InvalidOperationException("Transaction is provided, but its connection is null."); + using (var command = this.Transaction.Connection.CreateCommand()) + { + command.CommandText = GetPostgresSpecialTypesCntSql; + command.Transaction = this.Transaction; + using (var reader = await command.ExecuteReaderAsync()) + { + if (await reader.ReadAsync()) + { + return new GetPostgresSpecialTypesCntRow + { + CUuid = reader.IsDBNull(0) ? (Guid? 
)null : reader.GetFieldValue(0), + Cnt = reader.GetInt64(1) + }; + } + } + } + + return null; + } + private const string InsertPostgresArrayTypesSql = " INSERT INTO postgres_array_types ( c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_date_array, c_timestamp_array ) VALUES (@c_bytea, @c_boolean_array, @c_text_array, @c_integer_array, @c_decimal_array, @c_date_array, @c_timestamp_array)"; public class InsertPostgresArrayTypesArgs { diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index e44c4cd8..0231a546 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -115,33 +115,6 @@ } ] }, - { - "rel": { - "name": "postgres_types" - }, - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - } - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - } - } - ] - }, { "rel": { "name": "postgres_numeric_types" @@ -426,73 +399,6 @@ } ] }, - { - "rel": { - "name": "postgres_unstructured_types" - }, - "columns": [ - { - "name": "c_json", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_json_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "json" - } - }, - { - "name": "c_jsonb", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonb" - } - }, - { - "name": "c_jsonpath", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "jsonpath" - } - }, - { - "name": "c_xml", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - }, - "type": { - "name": "xml" - } - }, - { - "name": "c_xml_string_override", - "length": -1, - "table": { - "name": "postgres_unstructured_types" - 
}, - "type": { - "name": "xml" - } - } - ] - }, { "rel": { "name": "postgres_array_types" @@ -662,6 +568,93 @@ } } ] + }, + { + "rel": { + "name": "postgres_special_types" + }, + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + } + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + } + }, + { + "name": "c_json", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_json_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "json" + } + }, + { + "name": "c_jsonb", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonb" + } + }, + { + "name": "c_jsonpath", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "jsonpath" + } + }, + { + "name": "c_xml", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + }, + { + "name": "c_xml_string_override", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "xml" + } + } + ] } ], "enums": [ @@ -33465,130 +33458,6 @@ "cmd": ":exec", "filename": "query.sql" }, - { - "text": "\nINSERT INTO postgres_types\n(\n c_uuid,\n c_enum\n)\nVALUES (\n $1,\n $2::c_enum\n)", - "name": "InsertPostgresTypes", - "cmd": ":exec", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "isNamedParam": true, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - }, - { - "number": 2, - "column": { - "name": "c_enum", - "length": -1, - "type": { - "name": "c_enum" - } - } - } - ], - "comments": [ - " Special types " - ], - "filename": "query.sql", - "insert_into_table": { - "name": 
"postgres_types" - } - }, - { - "text": "INSERT INTO postgres_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", - "name": "InsertPostgresTypesBatch", - "cmd": ":copyfrom", - "parameters": [ - { - "number": 1, - "column": { - "name": "c_uuid", - "length": -1, - "table": { - "schema": "public", - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - } - } - ], - "filename": "query.sql", - "insert_into_table": { - "name": "postgres_types" - } - }, - { - "text": "SELECT\n c_uuid,\n c_enum\nFROM postgres_types \nLIMIT 1", - "name": "GetPostgresTypes", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "c_enum", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "c_enum" - }, - "originalName": "c_enum" - } - ], - "filename": "query.sql" - }, - { - "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_types\nGROUP BY\n c_uuid\nLIMIT 1", - "name": "GetPostgresTypesCnt", - "cmd": ":one", - "columns": [ - { - "name": "c_uuid", - "length": -1, - "table": { - "name": "postgres_types" - }, - "type": { - "name": "uuid" - }, - "originalName": "c_uuid" - }, - { - "name": "cnt", - "notNull": true, - "length": -1, - "isFuncCall": true, - "type": { - "name": "bigint" - } - } - ], - "filename": "query.sql" - }, { "text": "SELECT\n MAX(c_integer) AS max_integer,\n MAX(c_varchar) AS max_varchar,\n MAX(c_timestamp) AS max_timestamp\nFROM postgres_datetime_types\nCROSS JOIN postgres_numeric_types\nCROSS JOIN postgres_string_types", "name": "GetPostgresFunctions", @@ -33624,12 +33493,6 @@ ], "filename": "query.sql" }, - { - "text": "TRUNCATE TABLE postgres_types", - "name": "TruncatePostgresTypes", - "cmd": ":exec", - "filename": "query.sql" - }, { "text": "\nINSERT INTO postgres_numeric_types\n(\n c_boolean,\n c_bit,\n c_smallint,\n c_integer,\n c_bigint,\n c_decimal,\n 
c_numeric,\n c_real,\n c_double_precision,\n c_money\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", "name": "InsertPostgresNumericTypes", @@ -35169,8 +35032,8 @@ } }, { - "text": "\nINSERT INTO postgres_unstructured_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml\n)", - "name": "InsertPostgresUnstructuredTypes", + "text": "\nINSERT INTO postgres_special_types\n(\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\n)\nVALUES (\n $1::json, \n $2::json, \n $3::jsonb,\n $4::jsonpath,\n $5::xml,\n $6::xml,\n $7,\n $8::c_enum\n)", + "name": "InsertPostgresSpecialTypes", "cmd": ":exec", "parameters": [ { @@ -35232,26 +35095,52 @@ "name": "xml" } } + }, + { + "number": 7, + "column": { + "name": "c_uuid", + "length": -1, + "isNamedParam": true, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + }, + { + "number": 8, + "column": { + "name": "c_enum", + "length": -1, + "type": { + "name": "c_enum" + } + } } ], "comments": [ - " Unstructured types " + " Special types " ], "filename": "query.sql", "insert_into_table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" } }, { - "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override\nFROM postgres_unstructured_types \nLIMIT 1", - "name": "GetPostgresUnstructuredTypes", + "text": "SELECT\n c_json,\n c_json_string_override,\n c_jsonb,\n c_jsonpath,\n c_xml,\n c_xml_string_override,\n c_uuid,\n c_enum\nFROM postgres_special_types \nLIMIT 1", + "name": "GetPostgresSpecialTypes", "cmd": ":one", "columns": [ { "name": "c_json", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ 
-35262,7 +35151,7 @@ "name": "c_json_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "json" @@ -35273,7 +35162,7 @@ "name": "c_jsonb", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonb" @@ -35284,7 +35173,7 @@ "name": "c_jsonpath", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "jsonpath" @@ -35295,7 +35184,7 @@ "name": "c_xml", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" @@ -35306,22 +35195,98 @@ "name": "c_xml_string_override", "length": -1, "table": { - "name": "postgres_unstructured_types" + "name": "postgres_special_types" }, "type": { "name": "xml" }, "originalName": "c_xml_string_override" + }, + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "c_enum", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "c_enum" + }, + "originalName": "c_enum" } ], "filename": "query.sql" }, { - "text": "TRUNCATE TABLE postgres_unstructured_types", - "name": "TruncatePostgresUnstructuredTypes", + "text": "TRUNCATE TABLE postgres_special_types", + "name": "TruncatePostgresSpecialTypes", "cmd": ":exec", "filename": "query.sql" }, + { + "text": "INSERT INTO postgres_special_types\n(\n c_uuid\n)\nVALUES (\n $1\n)", + "name": "InsertPostgresSpecialTypesBatch", + "cmd": ":copyfrom", + "parameters": [ + { + "number": 1, + "column": { + "name": "c_uuid", + "length": -1, + "table": { + "schema": "public", + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + } + } + ], + "filename": "query.sql", + "insert_into_table": { + "name": "postgres_special_types" + 
} + }, + { + "text": "SELECT\n c_uuid,\n COUNT(*) AS cnt\nFROM postgres_special_types\nGROUP BY\n c_uuid\nLIMIT 1", + "name": "GetPostgresSpecialTypesCnt", + "cmd": ":one", + "columns": [ + { + "name": "c_uuid", + "length": -1, + "table": { + "name": "postgres_special_types" + }, + "type": { + "name": "uuid" + }, + "originalName": "c_uuid" + }, + { + "name": "cnt", + "notNull": true, + "length": -1, + "isFuncCall": true, + "type": { + "name": "bigint" + } + } + ], + "filename": "query.sql" + }, { "text": "\nINSERT INTO postgres_array_types\n(\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_date_array,\n c_timestamp_array\n)\nVALUES ($1, $2, $3, $4, $5, $6, $7)", "name": "InsertPostgresArrayTypes", diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index f9b68e43..f81d5a47 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -3,7 +3,7 @@ 2 postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postgresql/types/schema.sql",examples/config/postgresql/authors/query.sql"*examples/config/postgresql/types/query.sqlbх examples/NpgsqlLegacyExamplecsharpж{"debugRequest":true,"generateCsproj":true,"namespaceName":"NpgsqlLegacyExampleGen","overrides":[{"column":"GetPostgresFunctions:max_integer","csharp_type":{"notNull":false,"type":"int"}},{"column":"GetPostgresFunctions:max_varchar","csharp_type":{"notNull":false,"type":"string"}},{"column":"GetPostgresFunctions:max_timestamp","csharp_type":{"notNull":true,"type":"DateTime"}},{"column":"*:c_json_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_xml_string_override","csharp_type":{"notNull":false,"type":"string"}},{"column":"*:c_macaddr8","csharp_type":{"notNull":false,"type":"string"}}],"targetFramework":"netstandard2.0","useDapper":false}* -./dist/LocalRunnerъь public"мpublicГ +./dist/LocalRunner├ь public"ЕpublicГ authors) 
id0         R authorsb  bigserial& name0         R authorsbtext# @@ -13,10 +13,7 @@ postgresql-examples/config/postgresql/authors/schema.sql+examples/config/postg name0         Rbooksbtext5 author_id0         Rbooksb pg_catalogint8) - description0         Rbooksbtextr -postgres_types- -c_uuid0         Rpostgres_typesbuuid/ -c_enum0         Rpostgres_typesbc_enum╘ + description0         Rbooksbtext╘ postgres_numeric_typesD c_boolean0         Rpostgres_numeric_typesb pg_catalogbool? @@ -64,16 +61,7 @@ pg_cataloginterval c_macaddr0         Rpostgres_network_typesb macaddr= c_macaddr80         Rpostgres_network_typesb -macaddr8н -postgres_unstructured_types: -c_json0         Rpostgres_unstructured_typesbjsonJ -c_json_string_override0         Rpostgres_unstructured_typesbjson< -c_jsonb0         Rpostgres_unstructured_typesbjsonbB - -c_jsonpath0         Rpostgres_unstructured_typesb -jsonpath8 -c_xml0         Rpostgres_unstructured_typesbxmlH -c_xml_string_override0         Rpostgres_unstructured_typesbxmlХ +macaddr8Х postgres_array_types5 c_bytea0         Rpostgres_array_typesbbyteaM c_boolean_array 0         Rpostgres_array_typesb @@ -93,7 +81,18 @@ pg_catalog timestamp c_box0         Rpostgres_geometric_typesbbox7 c_path0         Rpostgres_geometric_typesbpath= c_polygon0         Rpostgres_geometric_typesb polygon; -c_circle0         Rpostgres_geometric_typesbcircle" +c_circle0         Rpostgres_geometric_typesbcircle· +postgres_special_types5 +c_uuid0         Rpostgres_special_typesbuuid7 +c_enum0         Rpostgres_special_typesbc_enum5 +c_json0         Rpostgres_special_typesbjsonE +c_json_string_override0         Rpostgres_special_typesbjson7 +c_jsonb0         Rpostgres_special_typesbjsonb= + +c_jsonpath0         Rpostgres_special_typesb +jsonpath3 +c_xml0         Rpostgres_special_typesbxmlC +c_xml_string_override0         Rpostgres_special_typesbxml" c_enumsmallmediumbig" pg_temp"ц▓ pg_catalogЙ & @@ -10325,43 +10324,7 @@ pg_catalogvarcharz author_name"B 
pg_catalogvarcharzname"G bio_type0         Rextendedbiosbextendedbio_typezbio_type*JF bio_type0         Rextendedbiosbextended.bio_typezbio_type: query.sqlF -TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sqlМ -X -INSERT INTO postgres_types -( - c_uuid, - c_enum -) -VALUES ( - $1, - $2::c_enum -)InsertPostgresTypes:exec*C? -c_uuid0         8Rpublicpostgres_typesbuuidzc_uuid*! -c_enum0         bc_enum2 Special types : query.sqlBpostgres_types┬ -;INSERT INTO postgres_types -( - c_uuid -) -VALUES ( - $1 -)InsertPostgresTypesBatch :copyfrom*A= -c_uuid0         Rpublicpostgres_typesbuuidzc_uuid: query.sqlBpostgres_types╧ -:SELECT - c_uuid, - c_enum -FROM postgres_types -LIMIT 1GetPostgresTypes:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid"7 -c_enum0         Rpostgres_typesbc_enumzc_enum: query.sql╒ -VSELECT - c_uuid, - COUNT(*) AS cnt -FROM postgres_types -GROUP BY - c_uuid -LIMIT 1GetPostgresTypesCnt:one"5 -c_uuid0         Rpostgres_typesbuuidzc_uuid" -cnt0         @bbigint: query.sql№ +TRUNCATE TABLE extended.biosTruncateExtendedBios:exec: query.sql№ ╥SELECT MAX(c_integer) AS max_integer, MAX(c_varchar) AS max_varchar, @@ -10374,8 +10337,7 @@ CROSS JOIN postgres_string_typesGetPostgresFunctions:one"( max_varchar0         @b anyarray"* max_timestamp0         @b -anyarray: query.sqlH -TRUNCATE TABLE postgres_typesTruncatePostgresTypes:exec: query.sqlр +anyarray: query.sqlр э INSERT INTO postgres_numeric_types ( @@ -10708,16 +10670,18 @@ LIMIT 1GetPostgresNetworkTypesCnt:one"= ) VALUES ($1, $2, $3)InsertPostgresNetworkTypesBatch :copyfrom*IE c_cidr0         R publicpostgres_network_typesbcidrzc_cidr*IE c_inet0         R publicpostgres_network_typesbinetzc_inet*RN - c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types╨ -∙ -INSERT INTO postgres_unstructured_types + c_macaddr0         R publicpostgres_network_typesb macaddrz c_macaddr: query.sqlBpostgres_network_types▄ +д +INSERT INTO 
postgres_special_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override + c_xml_string_override, + c_uuid, + c_enum ) VALUES ( $1::json, @@ -10725,8 +10689,10 @@ VALUES ( $3::jsonb, $4::jsonpath, $5::xml, - $6::xml -)InsertPostgresUnstructuredTypes:exec* + $6::xml, + $7, + $8::c_enum +)InsertPostgresSpecialTypes:exec* c_json0         bjson*/+ c_json_string_override0         bjson*! c_jsonb0         bjsonb*'# @@ -10734,26 +10700,49 @@ VALUES ( c_jsonpath0         b jsonpath* c_xml0         bxml*-) -c_xml_string_override0         bxml2 Unstructured types : query.sqlBpostgres_unstructured_typesн -ЪSELECT +c_xml_string_override0         bxml*KG +c_uuid0         8R publicpostgres_special_typesbuuidzc_uuid*! +c_enum0         bc_enum2 Special types : query.sqlBpostgres_special_typesЭ +нSELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override -FROM postgres_unstructured_types -LIMIT 1GetPostgresUnstructuredTypes:one"B -c_json0         Rpostgres_unstructured_typesbjsonzc_json"b -c_json_string_override0         Rpostgres_unstructured_typesbjsonzc_json_string_override"E -c_jsonb0         Rpostgres_unstructured_typesbjsonbzc_jsonb"N - -c_jsonpath0         Rpostgres_unstructured_typesb + c_xml_string_override, + c_uuid, + c_enum +FROM postgres_special_types +LIMIT 1GetPostgresSpecialTypes:one"= +c_json0         Rpostgres_special_typesbjsonzc_json"] +c_json_string_override0         Rpostgres_special_typesbjsonzc_json_string_override"@ +c_jsonb0         Rpostgres_special_typesbjsonbzc_jsonb"I + +c_jsonpath0         Rpostgres_special_typesb jsonpathz -c_jsonpath"? 
-c_xml0         Rpostgres_unstructured_typesbxmlzc_xml"_ -c_xml_string_override0         Rpostgres_unstructured_typesbxmlzc_xml_string_override: query.sqla -*TRUNCATE TABLE postgres_unstructured_types!TruncatePostgresUnstructuredTypes:exec: query.sql▌ +c_jsonpath": +c_xml0         Rpostgres_special_typesbxmlzc_xml"Z +c_xml_string_override0         Rpostgres_special_typesbxmlzc_xml_string_override"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid"? +c_enum0         Rpostgres_special_typesbc_enumzc_enum: query.sqlW +%TRUNCATE TABLE postgres_special_typesTruncatePostgresSpecialTypes:exec: query.sqlс +CINSERT INTO postgres_special_types +( + c_uuid +) +VALUES ( + $1 +)InsertPostgresSpecialTypesBatch :copyfrom*IE +c_uuid0         R publicpostgres_special_typesbuuidzc_uuid: query.sqlBpostgres_special_typesь +^SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_special_types +GROUP BY + c_uuid +LIMIT 1GetPostgresSpecialTypesCnt:one"= +c_uuid0         Rpostgres_special_typesbuuidzc_uuid" +cnt0         @bbigint: query.sql▌ ╧ INSERT INTO postgres_array_types ( diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index 86a95910..ed18aeee 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -1,41 +1,3 @@ -/* Special types */ - --- name: InsertPostgresTypes :exec -INSERT INTO postgres_types -( - c_uuid, - c_enum -) -VALUES ( - sqlc.narg('c_uuid'), - sqlc.narg('c_enum')::c_enum -); - --- name: InsertPostgresTypesBatch :copyfrom -INSERT INTO postgres_types -( - c_uuid -) -VALUES ( - $1 -); - --- name: GetPostgresTypes :one -SELECT - c_uuid, - c_enum -FROM postgres_types -LIMIT 1; - --- name: GetPostgresTypesCnt :one -SELECT - c_uuid, - COUNT(*) AS cnt -FROM postgres_types -GROUP BY - c_uuid -LIMIT 1; - -- name: GetPostgresFunctions :one SELECT MAX(c_integer) AS max_integer, @@ -45,9 +7,6 @@ FROM postgres_datetime_types CROSS JOIN postgres_numeric_types CROSS JOIN 
postgres_string_types; --- name: TruncatePostgresTypes :exec -TRUNCATE TABLE postgres_types; - /* Numeric types */ -- name: InsertPostgresNumericTypes :exec @@ -269,17 +228,19 @@ INSERT INTO postgres_network_types c_macaddr ) VALUES ($1, $2, $3); -/* Unstructured types */ +/* Special types */ --- name: InsertPostgresUnstructuredTypes :exec -INSERT INTO postgres_unstructured_types +-- name: InsertPostgresSpecialTypes :exec +INSERT INTO postgres_special_types ( c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override + c_xml_string_override, + c_uuid, + c_enum ) VALUES ( sqlc.narg('c_json')::json, @@ -287,22 +248,44 @@ VALUES ( sqlc.narg('c_jsonb')::jsonb, sqlc.narg('c_jsonpath')::jsonpath, sqlc.narg('c_xml')::xml, - sqlc.narg('c_xml_string_override')::xml + sqlc.narg('c_xml_string_override')::xml, + sqlc.narg('c_uuid'), + sqlc.narg('c_enum')::c_enum ); --- name: GetPostgresUnstructuredTypes :one +-- name: GetPostgresSpecialTypes :one SELECT c_json, c_json_string_override, c_jsonb, c_jsonpath, c_xml, - c_xml_string_override -FROM postgres_unstructured_types + c_xml_string_override, + c_uuid, + c_enum +FROM postgres_special_types LIMIT 1; --- name: TruncatePostgresUnstructuredTypes :exec -TRUNCATE TABLE postgres_unstructured_types; +-- name: TruncatePostgresSpecialTypes :exec +TRUNCATE TABLE postgres_special_types; + +-- name: InsertPostgresSpecialTypesBatch :copyfrom +INSERT INTO postgres_special_types +( + c_uuid +) +VALUES ( + $1 +); + +-- name: GetPostgresSpecialTypesCnt :one +SELECT + c_uuid, + COUNT(*) AS cnt +FROM postgres_special_types +GROUP BY + c_uuid +LIMIT 1; /* Array types */ @@ -337,7 +320,6 @@ LIMIT 1; -- name: TruncatePostgresArrayTypes :exec TRUNCATE TABLE postgres_array_types; - /* Geometric types */ -- name: InsertPostgresGeoTypes :exec diff --git a/examples/config/postgresql/types/schema.sql b/examples/config/postgresql/types/schema.sql index 9b2d6ed0..e269c9a1 100644 --- a/examples/config/postgresql/types/schema.sql +++ 
b/examples/config/postgresql/types/schema.sql @@ -1,13 +1,3 @@ -CREATE EXTENSION "uuid-ossp"; - -CREATE TYPE c_enum AS ENUM ('small', 'medium', 'big'); - -CREATE TABLE postgres_types ( - /* Special Data Types */ - c_uuid UUID, - c_enum c_enum -); - CREATE TABLE postgres_numeric_types ( c_boolean BOOLEAN, c_bit BIT(10), @@ -22,26 +12,26 @@ CREATE TABLE postgres_numeric_types ( ); CREATE TABLE postgres_string_types ( - c_char CHAR, - c_varchar VARCHAR(100), + c_char CHAR, + c_varchar VARCHAR(100), c_character_varying CHARACTER VARYING(100), - c_bpchar BPCHAR(100), - c_text TEXT + c_bpchar BPCHAR(100), + c_text TEXT ); CREATE TABLE postgres_datetime_types ( - c_date DATE, - c_time TIME, - c_timestamp TIMESTAMP, + c_date DATE, + c_time TIME, + c_timestamp TIMESTAMP, c_timestamp_with_tz TIMESTAMP WITH TIME ZONE, - c_interval INTERVAL + c_interval INTERVAL ); CREATE TABLE postgres_network_types ( - c_cidr CIDR, - c_inet INET, - c_macaddr MACADDR, - c_macaddr8 MACADDR8 + c_cidr CIDR, + c_inet INET, + c_macaddr MACADDR, + c_macaddr8 MACADDR8 ); CREATE EXTENSION "pg_trgm"; @@ -49,15 +39,6 @@ CREATE EXTENSION "btree_gin"; CREATE INDEX postgres_txt_idx ON postgres_string_types USING GIN (c_text); -CREATE TABLE postgres_unstructured_types ( - c_json JSON, - c_json_string_override JSON, - c_jsonb JSONB, - c_jsonpath JSONPATH, - c_xml XML, - c_xml_string_override XML -); - CREATE TABLE postgres_array_types ( c_bytea BYTEA, c_boolean_array BOOLEAN [], @@ -69,11 +50,26 @@ CREATE TABLE postgres_array_types ( ); CREATE TABLE postgres_geometric_types ( - c_point POINT, - c_line LINE, - c_lseg LSEG, - c_box BOX, - c_path PATH, - c_polygon POLYGON, - c_circle CIRCLE + c_point POINT, + c_line LINE, + c_lseg LSEG, + c_box BOX, + c_path PATH, + c_polygon POLYGON, + c_circle CIRCLE +); + +CREATE EXTENSION "uuid-ossp"; + +CREATE TYPE c_enum AS ENUM ('small', 'medium', 'big'); + +CREATE TABLE postgres_special_types ( + c_uuid UUID, + c_enum c_enum, + c_json JSON, + c_json_string_override 
JSON, + c_jsonb JSONB, + c_jsonpath JSONPATH, + c_xml XML, + c_xml_string_override XML ); \ No newline at end of file From 917a5fdfc5f34ce0d79233b50805436777cd77a7 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 23 Aug 2025 19:21:00 +0200 Subject: [PATCH 31/33] feat: support array data types in batch inserts --- .../Templates/PostgresTests.cs | 66 +++++++- .../NpgsqlDapperTester.generated.cs | 30 +++- .../EndToEndTests/NpgsqlTester.generated.cs | 30 +++- .../NpgsqlDapperTester.generated.cs | 30 +++- .../NpgsqlTester.generated.cs | 30 +++- examples/NpgsqlDapperExample/QuerySql.cs | 19 ++- examples/NpgsqlDapperExample/request.json | 158 +++++++++++++++++- examples/NpgsqlDapperExample/request.message | 51 +++++- .../NpgsqlDapperLegacyExample/QuerySql.cs | 19 ++- .../NpgsqlDapperLegacyExample/request.json | 158 +++++++++++++++++- .../NpgsqlDapperLegacyExample/request.message | 51 +++++- examples/NpgsqlExample/QuerySql.cs | 27 ++- examples/NpgsqlExample/request.json | 158 +++++++++++++++++- examples/NpgsqlExample/request.message | 51 +++++- examples/NpgsqlLegacyExample/QuerySql.cs | 33 +++- examples/NpgsqlLegacyExample/request.json | 158 +++++++++++++++++- examples/NpgsqlLegacyExample/request.message | 51 +++++- examples/config/postgresql/types/query.sql | 29 +++- 18 files changed, 1071 insertions(+), 78 deletions(-) diff --git a/end2end/EndToEndScaffold/Templates/PostgresTests.cs b/end2end/EndToEndScaffold/Templates/PostgresTests.cs index 46776aaa..1ed3c7dd 100644 --- a/end2end/EndToEndScaffold/Templates/PostgresTests.cs +++ b/end2end/EndToEndScaffold/Templates/PostgresTests.cs @@ -592,25 +592,74 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesRow x, QuerySql.GetPos [KnownTestType.PostgresArrayCopyFrom] = new TestImpl { Impl = $$""" + private static IEnumerable PostgresArrayCopyFromTestCases + { + get + { + yield return new TestCaseData( + 100, + new byte[] { 0x53, 0x56 }, + new bool[] { true, false }, + new string[] { "Sister Ray", "Venus in Furs" }, 
+ new int[] { 1, 2 }, + new decimal[] { 132.13m, 23.22m }, + new DateTime[] { new DateTime(1984, 8, 26), new DateTime(2000, 1, 2) } + ).SetName("Valid Array Copy From"); + + yield return new TestCaseData( + 10, + new byte[] { }, + new bool[] { }, + new string[] { }, + new int[] { }, + new decimal[] { }, + new DateTime[] { } + ).SetName("Empty Array Copy From"); + + yield return new TestCaseData( + 10, + null, + null, + null, + null, + null, + null + ).SetName("Null Array Copy From"); + } + } + [Test] - [TestCase(100, new byte[] { 0x53, 0x56 })] - [TestCase(10, new byte[] { })] - [TestCase(10, null)] + [TestCaseSource(nameof(PostgresArrayCopyFromTestCases))] public async Task TestArrayCopyFrom( int batchSize, - byte[] cBytea) + byte[] cBytea, + bool[] cBooleanArray, + string[] cTextArray, + int[] cIntegerArray, + decimal[] cDecimalArray, + DateTime[] cTimestampArray) { var batchArgs = Enumerable.Range(0, batchSize) .Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray = cTimestampArray }) .ToList(); await QuerySql.InsertPostgresArrayTypesBatch(batchArgs); var expected = new QuerySql.GetPostgresArrayTypesCntRow { Cnt = batchSize, - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray = cTimestampArray }; var actual = await QuerySql.GetPostgresArrayTypesCnt(); @@ -620,6 +669,11 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); + Assert.That(x.CBooleanArray, Is.EqualTo(y.CBooleanArray)); + Assert.That(x.CTextArray, Is.EqualTo(y.CTextArray)); + Assert.That(x.CIntegerArray, Is.EqualTo(y.CIntegerArray)); + Assert.That(x.CDecimalArray, 
Is.EqualTo(y.CDecimalArray)); + Assert.That(x.CTimestampArray, Is.EqualTo(y.CTimestampArray)); } } """ diff --git a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs index f0358705..d46e473f 100644 --- a/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlDapperTester.generated.cs @@ -925,18 +925,31 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.Get } } + private static IEnumerable PostgresArrayCopyFromTestCases + { + get + { + yield return new TestCaseData(100, new byte[] { 0x53, 0x56 }, new bool[] { true, false }, new string[] { "Sister Ray", "Venus in Furs" }, new int[] { 1, 2 }, new decimal[] { 132.13m, 23.22m }, new DateTime[] { new DateTime(1984, 8, 26), new DateTime(2000, 1, 2) }).SetName("Valid Array Copy From"); + yield return new TestCaseData(10, new byte[] { }, new bool[] { }, new string[] { }, new int[] { }, new decimal[] { }, new DateTime[] { }).SetName("Empty Array Copy From"); + yield return new TestCaseData(10, null, null, null, null, null, null).SetName("Null Array Copy From"); + } + } + [Test] - [TestCase(100, new byte[] { 0x53, 0x56 })] - [TestCase(10, new byte[] { })] - [TestCase(10, null)] - public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) + [TestCaseSource(nameof(PostgresArrayCopyFromTestCases))] + public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea, bool[] cBooleanArray, string[] cTextArray, int[] cIntegerArray, decimal[] cDecimalArray, DateTime[] cTimestampArray) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea, CBooleanArray = cBooleanArray, CTextArray = cTextArray, CIntegerArray = cIntegerArray, CDecimalArray = cDecimalArray, CTimestampArray = cTimestampArray 
}).ToList(); await QuerySql.InsertPostgresArrayTypesBatch(batchArgs); var expected = new QuerySql.GetPostgresArrayTypesCntRow { Cnt = batchSize, - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray = cTimestampArray }; var actual = await QuerySql.GetPostgresArrayTypesCnt(); AssertSingularEquals(expected, actual); @@ -944,6 +957,11 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); + Assert.That(x.CBooleanArray, Is.EqualTo(y.CBooleanArray)); + Assert.That(x.CTextArray, Is.EqualTo(y.CTextArray)); + Assert.That(x.CIntegerArray, Is.EqualTo(y.CIntegerArray)); + Assert.That(x.CDecimalArray, Is.EqualTo(y.CDecimalArray)); + Assert.That(x.CTimestampArray, Is.EqualTo(y.CTimestampArray)); } } diff --git a/end2end/EndToEndTests/NpgsqlTester.generated.cs b/end2end/EndToEndTests/NpgsqlTester.generated.cs index 2b35d057..fad87b7a 100644 --- a/end2end/EndToEndTests/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTests/NpgsqlTester.generated.cs @@ -925,18 +925,31 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.Get } } + private static IEnumerable PostgresArrayCopyFromTestCases + { + get + { + yield return new TestCaseData(100, new byte[] { 0x53, 0x56 }, new bool[] { true, false }, new string[] { "Sister Ray", "Venus in Furs" }, new int[] { 1, 2 }, new decimal[] { 132.13m, 23.22m }, new DateTime[] { new DateTime(1984, 8, 26), new DateTime(2000, 1, 2) }).SetName("Valid Array Copy From"); + yield return new TestCaseData(10, new byte[] { }, new bool[] { }, new string[] { }, new int[] { }, new decimal[] { }, new DateTime[] { }).SetName("Empty Array Copy From"); + yield return new TestCaseData(10, null, null, null, null, null, null).SetName("Null Array Copy From"); + } + } + [Test] - [TestCase(100, new byte[] { 
0x53, 0x56 })] - [TestCase(10, new byte[] { })] - [TestCase(10, null)] - public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) + [TestCaseSource(nameof(PostgresArrayCopyFromTestCases))] + public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea, bool[] cBooleanArray, string[] cTextArray, int[] cIntegerArray, decimal[] cDecimalArray, DateTime[] cTimestampArray) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea, CBooleanArray = cBooleanArray, CTextArray = cTextArray, CIntegerArray = cIntegerArray, CDecimalArray = cDecimalArray, CTimestampArray = cTimestampArray }).ToList(); await QuerySql.InsertPostgresArrayTypesBatch(batchArgs); var expected = new QuerySql.GetPostgresArrayTypesCntRow { Cnt = batchSize, - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray = cTimestampArray }; var actual = await QuerySql.GetPostgresArrayTypesCnt(); AssertSingularEquals(expected, actual.Value); @@ -944,6 +957,11 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); + Assert.That(x.CBooleanArray, Is.EqualTo(y.CBooleanArray)); + Assert.That(x.CTextArray, Is.EqualTo(y.CTextArray)); + Assert.That(x.CIntegerArray, Is.EqualTo(y.CIntegerArray)); + Assert.That(x.CDecimalArray, Is.EqualTo(y.CDecimalArray)); + Assert.That(x.CTimestampArray, Is.EqualTo(y.CTimestampArray)); } } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs index 9fa5851a..3ff92ed6 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs +++ 
b/end2end/EndToEndTestsLegacy/NpgsqlDapperTester.generated.cs @@ -925,18 +925,31 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.Get } } + private static IEnumerable PostgresArrayCopyFromTestCases + { + get + { + yield return new TestCaseData(100, new byte[] { 0x53, 0x56 }, new bool[] { true, false }, new string[] { "Sister Ray", "Venus in Furs" }, new int[] { 1, 2 }, new decimal[] { 132.13m, 23.22m }, new DateTime[] { new DateTime(1984, 8, 26), new DateTime(2000, 1, 2) }).SetName("Valid Array Copy From"); + yield return new TestCaseData(10, new byte[] { }, new bool[] { }, new string[] { }, new int[] { }, new decimal[] { }, new DateTime[] { }).SetName("Empty Array Copy From"); + yield return new TestCaseData(10, null, null, null, null, null, null).SetName("Null Array Copy From"); + } + } + [Test] - [TestCase(100, new byte[] { 0x53, 0x56 })] - [TestCase(10, new byte[] { })] - [TestCase(10, null)] - public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) + [TestCaseSource(nameof(PostgresArrayCopyFromTestCases))] + public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea, bool[] cBooleanArray, string[] cTextArray, int[] cIntegerArray, decimal[] cDecimalArray, DateTime[] cTimestampArray) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea, CBooleanArray = cBooleanArray, CTextArray = cTextArray, CIntegerArray = cIntegerArray, CDecimalArray = cDecimalArray, CTimestampArray = cTimestampArray }).ToList(); await QuerySql.InsertPostgresArrayTypesBatch(batchArgs); var expected = new QuerySql.GetPostgresArrayTypesCntRow { Cnt = batchSize, - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray 
= cTimestampArray }; var actual = await QuerySql.GetPostgresArrayTypesCnt(); AssertSingularEquals(expected, actual); @@ -944,6 +957,11 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); + Assert.That(x.CBooleanArray, Is.EqualTo(y.CBooleanArray)); + Assert.That(x.CTextArray, Is.EqualTo(y.CTextArray)); + Assert.That(x.CIntegerArray, Is.EqualTo(y.CIntegerArray)); + Assert.That(x.CDecimalArray, Is.EqualTo(y.CDecimalArray)); + Assert.That(x.CTimestampArray, Is.EqualTo(y.CTimestampArray)); } } diff --git a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs index 221d1ed8..2f985f5e 100644 --- a/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs +++ b/end2end/EndToEndTestsLegacy/NpgsqlTester.generated.cs @@ -925,18 +925,31 @@ void AssertSingularEquals(QuerySql.GetPostgresNetworkTypesCntRow x, QuerySql.Get } } + private static IEnumerable PostgresArrayCopyFromTestCases + { + get + { + yield return new TestCaseData(100, new byte[] { 0x53, 0x56 }, new bool[] { true, false }, new string[] { "Sister Ray", "Venus in Furs" }, new int[] { 1, 2 }, new decimal[] { 132.13m, 23.22m }, new DateTime[] { new DateTime(1984, 8, 26), new DateTime(2000, 1, 2) }).SetName("Valid Array Copy From"); + yield return new TestCaseData(10, new byte[] { }, new bool[] { }, new string[] { }, new int[] { }, new decimal[] { }, new DateTime[] { }).SetName("Empty Array Copy From"); + yield return new TestCaseData(10, null, null, null, null, null, null).SetName("Null Array Copy From"); + } + } + [Test] - [TestCase(100, new byte[] { 0x53, 0x56 })] - [TestCase(10, new byte[] { })] - [TestCase(10, null)] - public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea) + [TestCaseSource(nameof(PostgresArrayCopyFromTestCases))] + public async Task TestArrayCopyFrom(int batchSize, byte[] cBytea, bool[] cBooleanArray, string[] 
cTextArray, int[] cIntegerArray, decimal[] cDecimalArray, DateTime[] cTimestampArray) { - var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea }).ToList(); + var batchArgs = Enumerable.Range(0, batchSize).Select(_ => new QuerySql.InsertPostgresArrayTypesBatchArgs { CBytea = cBytea, CBooleanArray = cBooleanArray, CTextArray = cTextArray, CIntegerArray = cIntegerArray, CDecimalArray = cDecimalArray, CTimestampArray = cTimestampArray }).ToList(); await QuerySql.InsertPostgresArrayTypesBatch(batchArgs); var expected = new QuerySql.GetPostgresArrayTypesCntRow { Cnt = batchSize, - CBytea = cBytea + CBytea = cBytea, + CBooleanArray = cBooleanArray, + CTextArray = cTextArray, + CIntegerArray = cIntegerArray, + CDecimalArray = cDecimalArray, + CTimestampArray = cTimestampArray }; var actual = await QuerySql.GetPostgresArrayTypesCnt(); AssertSingularEquals(expected, actual); @@ -944,6 +957,11 @@ void AssertSingularEquals(QuerySql.GetPostgresArrayTypesCntRow x, QuerySql.GetPo { Assert.That(x.Cnt, Is.EqualTo(y.Cnt)); Assert.That(x.CBytea, Is.EqualTo(y.CBytea)); + Assert.That(x.CBooleanArray, Is.EqualTo(y.CBooleanArray)); + Assert.That(x.CTextArray, Is.EqualTo(y.CTextArray)); + Assert.That(x.CIntegerArray, Is.EqualTo(y.CIntegerArray)); + Assert.That(x.CDecimalArray, Is.EqualTo(y.CDecimalArray)); + Assert.That(x.CTimestampArray, Is.EqualTo(y.CTimestampArray)); } } diff --git a/examples/NpgsqlDapperExample/QuerySql.cs b/examples/NpgsqlDapperExample/QuerySql.cs index f8118418..69d2337f 100644 --- a/examples/NpgsqlDapperExample/QuerySql.cs +++ b/examples/NpgsqlDapperExample/QuerySql.cs @@ -1333,10 +1333,15 @@ public class GetPostgresArrayTypesRow return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + private const 
string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_timestamp_array) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresArrayTypesBatchArgs { public byte[]? CBytea { get; init; } + public bool[]? CBooleanArray { get; init; } + public string[]? CTextArray { get; init; } + public int[]? CIntegerArray { get; init; } + public decimal[]? CDecimalArray { get; init; } + public DateTime[]? CTimestampArray { get; init; } }; public async Task InsertPostgresArrayTypesBatch(List args) { @@ -1349,6 +1354,11 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() diff --git a/examples/NpgsqlDapperExample/request.json b/examples/NpgsqlDapperExample/request.json index caac45f6..5b1759e1 100644 --- a/examples/NpgsqlDapperExample/request.json +++ b/examples/NpgsqlDapperExample/request.json @@ -35520,7 +35520,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "text": "INSERT INTO postgres_array_types (\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\n) \nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6\n)", "name": "InsertPostgresArrayTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -35538,6 +35538,91 @@ }, "originalName": "c_bytea" } + }, + { + "number": 2, + "column": { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + } + }, + { + "number": 3, + "column": { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + 
"length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + } } ], "filename": "query.sql", @@ -35546,7 +35631,7 @@ } }, { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "text": "SELECT\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\nLIMIT 1", "name": "GetPostgresArrayTypesCnt", "cmd": ":one", "columns": [ @@ -35561,6 +35646,75 @@ }, "originalName": "c_bytea" }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + 
"arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + }, + { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + }, { "name": "cnt", "notNull": true, diff --git a/examples/NpgsqlDapperExample/request.message b/examples/NpgsqlDapperExample/request.message index ab300659..76a9fa7f 100644 --- a/examples/NpgsqlDapperExample/request.message +++ b/examples/NpgsqlDapperExample/request.message @@ -10773,17 +10773,56 @@ pg_catalogint4zc_integer_array pg_catalognumericzc_decimal_arrayИ"L c_date_array 0         Rpostgres_array_typesbdatez c_date_arrayИ"g c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampzc_timestamp_arrayИ: query.sql╤ -6INSERT INTO postgres_array_types (c_bytea) VALUES ($1)InsertPostgresArrayTypesBatch :copyfrom*JF -c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea: query.sqlBpostgres_array_typesы -^SELECT +pg_catalog timestampzc_timestamp_arrayИ: query.sqlБ +╙INSERT INTO postgres_array_types ( c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +)InsertPostgresArrayTypesBatch :copyfrom*JF +c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea*ie +c_boolean_array 0         Rpublicpostgres_array_typesbpg_catalog.boolzc_boolean_arrayИ*XT + c_text_array 0         Rpublicpostgres_array_typesbtextz c_text_arrayИ*ie +c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh +c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*rn +c_timestamp_array 0         
Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesЦ +оSELECT + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array, COUNT(*) AS cnt FROM postgres_array_types GROUP BY - c_bytea + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array LIMIT 1GetPostgresArrayTypesCnt:one"> -c_bytea0         Rpostgres_array_typesbbyteazc_bytea" +c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ +c_boolean_array 0         Rpostgres_array_typesb +pg_catalogboolzc_boolean_arrayИ"L + c_text_array 0         Rpostgres_array_typesbtextz c_text_arrayИ"^ +c_integer_array 0         Rpostgres_array_typesb +pg_catalogint4zc_integer_arrayИ"a +c_decimal_array 0         Rpostgres_array_typesb +pg_catalognumericzc_decimal_arrayИ"g +c_timestamp_array 0         Rpostgres_array_typesb +pg_catalog timestampzc_timestamp_arrayИ" cnt0         @bbigint: query.sqlS #TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ л diff --git a/examples/NpgsqlDapperLegacyExample/QuerySql.cs b/examples/NpgsqlDapperLegacyExample/QuerySql.cs index 9fb25e0b..73dda028 100644 --- a/examples/NpgsqlDapperLegacyExample/QuerySql.cs +++ b/examples/NpgsqlDapperLegacyExample/QuerySql.cs @@ -1334,10 +1334,15 @@ public async Task GetPostgresArrayTypes() return await this.Transaction.Connection.QueryFirstOrDefaultAsync(GetPostgresArrayTypesSql, transaction: this.Transaction); } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_timestamp_array) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresArrayTypesBatchArgs { public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { 
get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; public async Task InsertPostgresArrayTypesBatch(List args) { @@ -1350,6 +1355,11 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() diff --git a/examples/NpgsqlDapperLegacyExample/request.json b/examples/NpgsqlDapperLegacyExample/request.json index 9db0a4eb..17f34e7c 100644 --- a/examples/NpgsqlDapperLegacyExample/request.json +++ b/examples/NpgsqlDapperLegacyExample/request.json @@ -35520,7 +35520,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "text": "INSERT INTO postgres_array_types (\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\n) \nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6\n)", "name": "InsertPostgresArrayTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -35538,6 +35538,91 @@ }, "originalName": "c_bytea" } + }, + { + "number": 2, + "column": { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + } + }, + { + "number": 3, + "column": { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": 
"postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + } } ], "filename": "query.sql", @@ -35546,7 +35631,7 @@ } }, { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "text": "SELECT\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\nLIMIT 1", "name": "GetPostgresArrayTypesCnt", "cmd": ":one", "columns": [ @@ -35561,6 +35646,75 @@ }, "originalName": "c_bytea" }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + }, + { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + 
"table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + }, { "name": "cnt", "notNull": true, diff --git a/examples/NpgsqlDapperLegacyExample/request.message b/examples/NpgsqlDapperLegacyExample/request.message index d8e8b880..44b9c189 100644 --- a/examples/NpgsqlDapperLegacyExample/request.message +++ b/examples/NpgsqlDapperLegacyExample/request.message @@ -10773,17 +10773,56 @@ pg_catalogint4zc_integer_array pg_catalognumericzc_decimal_arrayИ"L c_date_array 0         Rpostgres_array_typesbdatez c_date_arrayИ"g c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampzc_timestamp_arrayИ: query.sql╤ -6INSERT INTO postgres_array_types (c_bytea) VALUES ($1)InsertPostgresArrayTypesBatch :copyfrom*JF -c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea: query.sqlBpostgres_array_typesы -^SELECT +pg_catalog timestampzc_timestamp_arrayИ: query.sqlБ +╙INSERT INTO postgres_array_types ( c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +)InsertPostgresArrayTypesBatch :copyfrom*JF +c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea*ie +c_boolean_array 0         Rpublicpostgres_array_typesbpg_catalog.boolzc_boolean_arrayИ*XT + c_text_array 0         Rpublicpostgres_array_typesbtextz c_text_arrayИ*ie +c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh +c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*rn +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesЦ +оSELECT + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array, COUNT(*) AS cnt FROM postgres_array_types GROUP BY - c_bytea + c_bytea, + c_boolean_array, + c_text_array, + 
c_integer_array, + c_decimal_array, + c_timestamp_array LIMIT 1GetPostgresArrayTypesCnt:one"> -c_bytea0         Rpostgres_array_typesbbyteazc_bytea" +c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ +c_boolean_array 0         Rpostgres_array_typesb +pg_catalogboolzc_boolean_arrayИ"L + c_text_array 0         Rpostgres_array_typesbtextz c_text_arrayИ"^ +c_integer_array 0         Rpostgres_array_typesb +pg_catalogint4zc_integer_arrayИ"a +c_decimal_array 0         Rpostgres_array_typesb +pg_catalognumericzc_decimal_arrayИ"g +c_timestamp_array 0         Rpostgres_array_typesb +pg_catalog timestampzc_timestamp_arrayИ" cnt0         @bbigint: query.sqlS #TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ л diff --git a/examples/NpgsqlExample/QuerySql.cs b/examples/NpgsqlExample/QuerySql.cs index a0fc16be..2e689fbf 100644 --- a/examples/NpgsqlExample/QuerySql.cs +++ b/examples/NpgsqlExample/QuerySql.cs @@ -1990,8 +1990,8 @@ public async Task InsertPostgresArrayTypes(InsertPostgresArrayTypesArgs args) return null; } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; - public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea); + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_timestamp_array) FROM STDIN (FORMAT BINARY)"; + public readonly record struct InsertPostgresArrayTypesBatchArgs(byte[]? CBytea, bool[]? CBooleanArray, string[]? CTextArray, int[]? CIntegerArray, decimal[]? CDecimalArray, DateTime[]? 
CTimestampArray); public async Task InsertPostgresArrayTypesBatch(List args) { using (var connection = new NpgsqlConnection(ConnectionString)) @@ -2003,6 +2003,11 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() { if (this.Transaction == null) @@ -2029,7 +2034,12 @@ public async Task InsertPostgresArrayTypesBatch(List(0), - Cnt = reader.GetInt64(1) + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CTimestampArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + Cnt = reader.GetInt64(6) }; } } @@ -2052,7 +2062,12 @@ public async Task InsertPostgresArrayTypesBatch(List(0), - Cnt = reader.GetInt64(1) + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CTimestampArray = reader.IsDBNull(5) ? 
null : reader.GetFieldValue(5), + Cnt = reader.GetInt64(6) }; } } diff --git a/examples/NpgsqlExample/request.json b/examples/NpgsqlExample/request.json index e3ef486e..570cc70b 100644 --- a/examples/NpgsqlExample/request.json +++ b/examples/NpgsqlExample/request.json @@ -35520,7 +35520,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "text": "INSERT INTO postgres_array_types (\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\n) \nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6\n)", "name": "InsertPostgresArrayTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -35538,6 +35538,91 @@ }, "originalName": "c_bytea" } + }, + { + "number": 2, + "column": { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + } + }, + { + "number": 3, + "column": { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + 
}, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + } } ], "filename": "query.sql", @@ -35546,7 +35631,7 @@ } }, { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "text": "SELECT\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\nLIMIT 1", "name": "GetPostgresArrayTypesCnt", "cmd": ":one", "columns": [ @@ -35561,6 +35646,75 @@ }, "originalName": "c_bytea" }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + }, + { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + }, { "name": "cnt", "notNull": true, diff --git a/examples/NpgsqlExample/request.message b/examples/NpgsqlExample/request.message index 
7fbb6982..37e54019 100644 --- a/examples/NpgsqlExample/request.message +++ b/examples/NpgsqlExample/request.message @@ -10773,17 +10773,56 @@ pg_catalogint4zc_integer_array pg_catalognumericzc_decimal_arrayИ"L c_date_array 0         Rpostgres_array_typesbdatez c_date_arrayИ"g c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampzc_timestamp_arrayИ: query.sql╤ -6INSERT INTO postgres_array_types (c_bytea) VALUES ($1)InsertPostgresArrayTypesBatch :copyfrom*JF -c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea: query.sqlBpostgres_array_typesы -^SELECT +pg_catalog timestampzc_timestamp_arrayИ: query.sqlБ +╙INSERT INTO postgres_array_types ( c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +)InsertPostgresArrayTypesBatch :copyfrom*JF +c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea*ie +c_boolean_array 0         Rpublicpostgres_array_typesbpg_catalog.boolzc_boolean_arrayИ*XT + c_text_array 0         Rpublicpostgres_array_typesbtextz c_text_arrayИ*ie +c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh +c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*rn +c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesЦ +оSELECT + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array, COUNT(*) AS cnt FROM postgres_array_types GROUP BY - c_bytea + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array LIMIT 1GetPostgresArrayTypesCnt:one"> -c_bytea0         Rpostgres_array_typesbbyteazc_bytea" +c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ +c_boolean_array 0         Rpostgres_array_typesb +pg_catalogboolzc_boolean_arrayИ"L + c_text_array 0         Rpostgres_array_typesbtextz c_text_arrayИ"^ 
+c_integer_array 0         Rpostgres_array_typesb +pg_catalogint4zc_integer_arrayИ"a +c_decimal_array 0         Rpostgres_array_typesb +pg_catalognumericzc_decimal_arrayИ"g +c_timestamp_array 0         Rpostgres_array_typesb +pg_catalog timestampzc_timestamp_arrayИ" cnt0         @bbigint: query.sqlS #TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ л diff --git a/examples/NpgsqlLegacyExample/QuerySql.cs b/examples/NpgsqlLegacyExample/QuerySql.cs index 3ee1fd51..e52ec5c5 100644 --- a/examples/NpgsqlLegacyExample/QuerySql.cs +++ b/examples/NpgsqlLegacyExample/QuerySql.cs @@ -2290,10 +2290,15 @@ public async Task GetPostgresArrayTypes() return null; } - private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea) FROM STDIN (FORMAT BINARY)"; + private const string InsertPostgresArrayTypesBatchSql = "COPY postgres_array_types (c_bytea, c_boolean_array, c_text_array, c_integer_array, c_decimal_array, c_timestamp_array) FROM STDIN (FORMAT BINARY)"; public class InsertPostgresArrayTypesBatchArgs { public byte[] CBytea { get; set; } + public bool[] CBooleanArray { get; set; } + public string[] CTextArray { get; set; } + public int[] CIntegerArray { get; set; } + public decimal[] CDecimalArray { get; set; } + public DateTime[] CTimestampArray { get; set; } }; public async Task InsertPostgresArrayTypesBatch(List args) { @@ -2306,6 +2311,11 @@ public async Task InsertPostgresArrayTypesBatch(List GetPostgresArrayTypesCnt() @@ -2336,7 +2351,12 @@ public async Task GetPostgresArrayTypesCnt() return new GetPostgresArrayTypesCntRow { CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? 
null : reader.GetFieldValue(4), + CTimestampArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + Cnt = reader.GetInt64(6) }; } } @@ -2359,7 +2379,12 @@ public async Task GetPostgresArrayTypesCnt() return new GetPostgresArrayTypesCntRow { CBytea = reader.IsDBNull(0) ? null : reader.GetFieldValue(0), - Cnt = reader.GetInt64(1) + CBooleanArray = reader.IsDBNull(1) ? null : reader.GetFieldValue(1), + CTextArray = reader.IsDBNull(2) ? null : reader.GetFieldValue(2), + CIntegerArray = reader.IsDBNull(3) ? null : reader.GetFieldValue(3), + CDecimalArray = reader.IsDBNull(4) ? null : reader.GetFieldValue(4), + CTimestampArray = reader.IsDBNull(5) ? null : reader.GetFieldValue(5), + Cnt = reader.GetInt64(6) }; } } diff --git a/examples/NpgsqlLegacyExample/request.json b/examples/NpgsqlLegacyExample/request.json index 0231a546..8e122bc0 100644 --- a/examples/NpgsqlLegacyExample/request.json +++ b/examples/NpgsqlLegacyExample/request.json @@ -35520,7 +35520,7 @@ "filename": "query.sql" }, { - "text": "INSERT INTO postgres_array_types (c_bytea) VALUES ($1)", + "text": "INSERT INTO postgres_array_types (\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\n) \nVALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6\n)", "name": "InsertPostgresArrayTypesBatch", "cmd": ":copyfrom", "parameters": [ @@ -35538,6 +35538,91 @@ }, "originalName": "c_bytea" } + }, + { + "number": 2, + "column": { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + } + }, + { + "number": 3, + "column": { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + } + }, + { + "number": 4, + "column": { + "name": 
"c_integer_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.int4" + }, + "originalName": "c_integer_array", + "arrayDims": 1 + } + }, + { + "number": 5, + "column": { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + } + }, + { + "number": 6, + "column": { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "schema": "public", + "name": "postgres_array_types" + }, + "type": { + "name": "pg_catalog.timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + } } ], "filename": "query.sql", @@ -35546,7 +35631,7 @@ } }, { - "text": "SELECT\n c_bytea,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea\nLIMIT 1", + "text": "SELECT\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array,\n COUNT(*) AS cnt\nFROM postgres_array_types\nGROUP BY\n c_bytea,\n c_boolean_array,\n c_text_array,\n c_integer_array,\n c_decimal_array,\n c_timestamp_array\nLIMIT 1", "name": "GetPostgresArrayTypesCnt", "cmd": ":one", "columns": [ @@ -35561,6 +35646,75 @@ }, "originalName": "c_bytea" }, + { + "name": "c_boolean_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "bool" + }, + "originalName": "c_boolean_array", + "arrayDims": 1 + }, + { + "name": "c_text_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "name": "text" + }, + "originalName": "c_text_array", + "arrayDims": 1 + }, + { + "name": "c_integer_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "int4" + 
}, + "originalName": "c_integer_array", + "arrayDims": 1 + }, + { + "name": "c_decimal_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "numeric" + }, + "originalName": "c_decimal_array", + "arrayDims": 1 + }, + { + "name": "c_timestamp_array", + "isArray": true, + "length": -1, + "table": { + "name": "postgres_array_types" + }, + "type": { + "schema": "pg_catalog", + "name": "timestamp" + }, + "originalName": "c_timestamp_array", + "arrayDims": 1 + }, { "name": "cnt", "notNull": true, diff --git a/examples/NpgsqlLegacyExample/request.message b/examples/NpgsqlLegacyExample/request.message index f81d5a47..de7cc006 100644 --- a/examples/NpgsqlLegacyExample/request.message +++ b/examples/NpgsqlLegacyExample/request.message @@ -10773,17 +10773,56 @@ pg_catalogint4zc_integer_array pg_catalognumericzc_decimal_arrayИ"L c_date_array 0         Rpostgres_array_typesbdatez c_date_arrayИ"g c_timestamp_array 0         Rpostgres_array_typesb -pg_catalog timestampzc_timestamp_arrayИ: query.sql╤ -6INSERT INTO postgres_array_types (c_bytea) VALUES ($1)InsertPostgresArrayTypesBatch :copyfrom*JF -c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea: query.sqlBpostgres_array_typesы -^SELECT +pg_catalog timestampzc_timestamp_arrayИ: query.sqlБ +╙INSERT INTO postgres_array_types ( c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +)InsertPostgresArrayTypesBatch :copyfrom*JF +c_bytea0         Rpublicpostgres_array_typesbbyteazc_bytea*ie +c_boolean_array 0         Rpublicpostgres_array_typesbpg_catalog.boolzc_boolean_arrayИ*XT + c_text_array 0         Rpublicpostgres_array_typesbtextz c_text_arrayИ*ie +c_integer_array 0         Rpublicpostgres_array_typesbpg_catalog.int4zc_integer_arrayИ*lh +c_decimal_array 0         Rpublicpostgres_array_typesbpg_catalog.numericzc_decimal_arrayИ*rn 
+c_timestamp_array 0         Rpublicpostgres_array_typesbpg_catalog.timestampzc_timestamp_arrayИ: query.sqlBpostgres_array_typesЦ +оSELECT + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array, COUNT(*) AS cnt FROM postgres_array_types GROUP BY - c_bytea + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array LIMIT 1GetPostgresArrayTypesCnt:one"> -c_bytea0         Rpostgres_array_typesbbyteazc_bytea" +c_bytea0         Rpostgres_array_typesbbyteazc_bytea"^ +c_boolean_array 0         Rpostgres_array_typesb +pg_catalogboolzc_boolean_arrayИ"L + c_text_array 0         Rpostgres_array_typesbtextz c_text_arrayИ"^ +c_integer_array 0         Rpostgres_array_typesb +pg_catalogint4zc_integer_arrayИ"a +c_decimal_array 0         Rpostgres_array_typesb +pg_catalognumericzc_decimal_arrayИ"g +c_timestamp_array 0         Rpostgres_array_typesb +pg_catalog timestampzc_timestamp_arrayИ" cnt0         @bbigint: query.sqlS #TRUNCATE TABLE postgres_array_typesTruncatePostgresArrayTypes:exec: query.sql▒ л diff --git a/examples/config/postgresql/types/query.sql b/examples/config/postgresql/types/query.sql index ed18aeee..2738dc59 100644 --- a/examples/config/postgresql/types/query.sql +++ b/examples/config/postgresql/types/query.sql @@ -306,15 +306,40 @@ VALUES ($1, $2, $3, $4, $5, $6, $7); SELECT * FROM postgres_array_types LIMIT 1; -- name: InsertPostgresArrayTypesBatch :copyfrom -INSERT INTO postgres_array_types (c_bytea) VALUES ($1); +INSERT INTO postgres_array_types ( + c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +); -- name: GetPostgresArrayTypesCnt :one SELECT c_bytea, + c_boolean_array, + c_text_array, + c_integer_array, + c_decimal_array, + c_timestamp_array, COUNT(*) AS cnt FROM postgres_array_types GROUP BY - c_bytea + c_bytea, + c_boolean_array, + c_text_array, + 
c_integer_array, + c_decimal_array, + c_timestamp_array LIMIT 1; -- name: TruncatePostgresArrayTypes :exec From 8d4aef2ab66017c2725318288acb19d97d8f1809 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sat, 23 Aug 2025 23:24:54 +0200 Subject: [PATCH 32/33] fix: update Postgres data types doc --- docs/04_Postgres.md | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/docs/04_Postgres.md b/docs/04_Postgres.md index a3861b7d..22f9451b 100644 --- a/docs/04_Postgres.md +++ b/docs/04_Postgres.md @@ -46,7 +46,7 @@ we consider support for the different data types separately for batch inserts an | varchar, character varying | тЬЕ | тЬЕ | | text | тЬЕ | тЬЕ | | bytea | тЬЕ | тЬЕ | -| 2-dimensional arrays (e.g text[],int[]) | тЬЕ | тЭМ | +| 2-dimensional arrays (e.g text[],int[]) | тЬЕ | тЬЕ | | money | тЬЕ | тЬЕ | | point | тЬЕ | тЬЕ | | line | тЬЕ | тЬЕ | @@ -58,19 +58,28 @@ we consider support for the different data types separately for batch inserts an | cidr | тЬЕ | тЬЕ | | inet | тЬЕ | тЬЕ | | macaddr | тЬЕ | тЬЕ | -| macaddr8 | тЬЕ | тЭМ | -| tsvector | тЬЕ | тЭМ | -| tsquery | тЬЕ | тЭМ | +| macaddr8 | тЬЕ | тЪая╕П | +| tsvector | тЬЕ | тЭМ | +| tsquery | тЬЕ | тЭМ | | uuid | тЬЕ | тЬЕ | -| json | тЬЕ | тЭМ | -| jsonb | тЬЕ | тЭМ | -| jsonpath | тЬЕ | тЭМ | -| xml | тЬЕ | тЭМ | -| enum | тЬЕ | тЭМ | +| json | тЬЕ | тЪая╕П | +| jsonb | тЬЕ | тЪая╕П | +| jsonpath | тЬЕ | тЪая╕П | +| xml | тЬЕ | тЪая╕П | +| enum | тЬЕ | тЪая╕П | *** `time with time zone` is not useful and not recommended to use by Postgres themselves - see [here](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME) - so we decided not to implement support for it. +*** Arguments data type conversion in the INSERT statement, and SQLC disallows argument conversion in queries with `:copyfrom` annotation, used for batch inserts. +These are the data types that require this conversion: +1. `macaddr8` +2. `json` +3. `jsonb` +4. `jsonpath` +5. 
`xml` +6. `enum` + From d50fe933b2a0366d94904b6e149a42f8a7e6ec91 Mon Sep 17 00:00:00 2001 From: Ilan Uzan Date: Sun, 24 Aug 2025 12:23:40 +0200 Subject: [PATCH 33/33] fix: Update Postgrs doc with data type conversion example --- docs/04_Postgres.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/04_Postgres.md b/docs/04_Postgres.md index 22f9451b..fa0389a4 100644 --- a/docs/04_Postgres.md +++ b/docs/04_Postgres.md @@ -72,8 +72,8 @@ we consider support for the different data types separately for batch inserts an see [here](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME) - so we decided not to implement support for it. -*** Arguments data type conversion in the INSERT statement, and SQLC disallows argument conversion in queries with `:copyfrom` annotation, used for batch inserts. -These are the data types that require this conversion: +*** Some data types require conversion in the INSERT statement, and SQLC disallows argument conversion in queries with `:copyfrom` annotation, +which are used for batch inserts. These are the data types that require this conversion: 1. `macaddr8` 2. `json` 3. `jsonb` @@ -81,5 +81,10 @@ These are the data types that require this conversion: 5. `xml` 6. `enum` +An example of this conversion: +```sql +INSERT INTO tab1 (json_field) VALUES (sqlc.narg('json_field')::json); +``` +